/* Consolidation of svalues and regions.
   Copyright (C) 2020-2022 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "tristate.h"
#include "bitmap.h"
#include "selftest.h"
#include "json.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "digraph.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"

#if ENABLE_ANALYZER

namespace ana {

/* class region_model_manager.  */

/* region_model_manager's ctor.  */

region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_next_region_id (0),
  m_root_region (alloc_region_id ()),
  m_stack_region (alloc_region_id (), &m_root_region),
  m_heap_region (alloc_region_id (), &m_root_region),
  m_unknown_NULL (NULL),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_region_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_region_id (), &m_root_region),
  m_globals_map (),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ())
{
}

/* region_model_manager's dtor.  Delete all of the managed svalues
   and regions.  */

region_model_manager::~region_model_manager ()
{
  /* Delete consolidated svalues.  */
  for (constants_map_t::iterator iter = m_constants_map.begin ();
       iter != m_constants_map.end (); ++iter)
    delete (*iter).second;
  for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
       iter != m_unknowns_map.end (); ++iter)
    delete (*iter).second;
  delete m_unknown_NULL;
  for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
       iter != m_poisoned_values_map.end (); ++iter)
    delete (*iter).second;
  for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
       iter != m_setjmp_values_map.end (); ++iter)
    delete (*iter).second;
  for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
       iter != m_initial_values_map.end (); ++iter)
    delete (*iter).second;
  for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
       iter != m_pointer_values_map.end (); ++iter)
    delete (*iter).second;
  for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
       iter != m_unaryop_values_map.end (); ++iter)
    delete (*iter).second;
  for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
       iter != m_binop_values_map.end (); ++iter)
    delete (*iter).second;
  for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
       iter != m_sub_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_repeated_values_map)
    delete iter.second;
  for (auto iter : m_bits_within_values_map)
    delete iter.second;
  for (unmergeable_values_map_t::iterator iter
	 = m_unmergeable_values_map.begin ();
       iter != m_unmergeable_values_map.end (); ++iter)
    delete (*iter).second;
  for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
       iter != m_widening_values_map.end (); ++iter)
    delete (*iter).second;
  for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
       iter != m_compound_values_map.end (); ++iter)
    delete (*iter).second;
  for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
       iter != m_conjured_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_asm_output_values_map)
    delete iter.second;
  for (auto iter : m_const_fn_result_values_map)
    delete iter.second;

  /* Delete consolidated regions.  */
  for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
       iter != m_fndecls_map.end (); ++iter)
    delete (*iter).second;
  for (labels_map_t::iterator iter = m_labels_map.begin ();
       iter != m_labels_map.end (); ++iter)
    delete (*iter).second;
  for (globals_map_t::iterator iter = m_globals_map.begin ();
       iter != m_globals_map.end (); ++iter)
    delete (*iter).second;
  for (string_map_t::iterator iter = m_string_map.begin ();
       iter != m_string_map.end (); ++iter)
    delete (*iter).second;

  delete m_range_mgr;
}

/* Return true if C exceeds the complexity limit for svalues.  */

bool
region_model_manager::too_complex_p (const complexity &c) const
{
  if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
    return true;
  return false;
}
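
/* Note: the depth limit checked above corresponds to the
   "--param=analyzer-max-svalue-depth=N" command-line option.  */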

/* If SVAL exceeds the complexity limit for svalues, delete it
   and return true.
   Otherwise update m_max_complexity and return false.  */

bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  if (m_checking_feasibility)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  delete sval;
  return true;
}

/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL) \
  do { \
    svalue *sval_ = (SVAL); \
    tree type_ = sval_->get_type (); \
    if (reject_if_too_complex (sval_)) \
      return get_or_create_unknown_svalue (type_); \
  } while (0)

/* svalue consolidation.  */

/* Return the svalue * for a constant_svalue for CST_EXPR,
   creating it if necessary.
   The constant_svalue instances are reused, based on pointer equality
   of trees.  */

const svalue *
region_model_manager::get_or_create_constant_svalue (tree cst_expr)
{
  gcc_assert (cst_expr);
  gcc_assert (CONSTANT_CLASS_P (cst_expr));

  constant_svalue **slot = m_constants_map.get (cst_expr);
  if (slot)
    return *slot;
  constant_svalue *cst_sval = new constant_svalue (cst_expr);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
  m_constants_map.put (cst_expr, cst_sval);
  return cst_sval;
}

/* Return the svalue * for a constant_svalue for the INTEGER_CST
   for VAL of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_int_cst (tree type, poly_int64 val)
{
  gcc_assert (type);
  tree tree_cst = build_int_cst (type, val);
  return get_or_create_constant_svalue (tree_cst);
}

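/* For example (illustrative), get_or_create_int_cst (integer_type_node, 42)
   returns the consolidated constant_svalue for "(int)42"; repeated calls
   with the same arguments return the same svalue instance.  */
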
/* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
   creating it if necessary.
   The unknown_svalue instances are reused, based on pointer equality
   of the types.  */

const svalue *
region_model_manager::get_or_create_unknown_svalue (tree type)
{
  /* Don't create unknown values when doing feasibility testing;
     instead, create a unique svalue.  */
  if (m_checking_feasibility)
    return create_unique_svalue (type);

  /* Special-case NULL, so that the hash_map can use NULL as the
     "empty" value.  */
  if (type == NULL_TREE)
    {
      if (!m_unknown_NULL)
	m_unknown_NULL = new unknown_svalue (type);
      return m_unknown_NULL;
    }

  unknown_svalue **slot = m_unknowns_map.get (type);
  if (slot)
    return *slot;
  unknown_svalue *sval = new unknown_svalue (type);
  m_unknowns_map.put (type, sval);
  return sval;
}

/* Return a freshly-allocated svalue of TYPE, owned by this manager.  */

const svalue *
region_model_manager::create_unique_svalue (tree type)
{
  svalue *sval = new placeholder_svalue (type, "unique");
  m_managed_dynamic_svalues.safe_push (sval);
  return sval;
}

/* Return the svalue * for the initial value of REG, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_initial_value (const region *reg)
{
  if (!reg->can_have_initial_svalue_p ())
    return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
					  reg->get_type ());

  /* The initial value of a cast is a cast of the initial value.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    {
      const region *original_reg = cast_reg->get_original_region ();
      return get_or_create_cast (cast_reg->get_type (),
				 get_or_create_initial_value (original_reg));
    }

  /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL.  */
  if (reg->symbolic_for_unknown_ptr_p ())
    return get_or_create_unknown_svalue (reg->get_type ());

  if (initial_svalue **slot = m_initial_values_map.get (reg))
    return *slot;
  initial_svalue *initial_sval = new initial_svalue (reg->get_type (), reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
  m_initial_values_map.put (reg, initial_sval);
  return initial_sval;
}

/* Return the svalue * for R using type TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
						   tree type)
{
  setjmp_svalue::key_t key (r, type);
  if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
    return *slot;
  setjmp_svalue *setjmp_sval = new setjmp_svalue (r, type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
  m_setjmp_values_map.put (key, setjmp_sval);
  return setjmp_sval;
}

/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
						     tree type)
{
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval = new poisoned_svalue (kind, type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}

/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
{
  /* If this is a symbolic region from dereferencing a pointer, and the types
     match, then return the original pointer.  */
  if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
    if (ptr_type == sym_reg->get_pointer ()->get_type ())
      return sym_reg->get_pointer ();

  region_svalue::key_t key (ptr_type, pointee);
  if (region_svalue **slot = m_pointer_values_map.get (key))
    return *slot;
  region_svalue *sval = new region_svalue (ptr_type, pointee);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
  m_pointer_values_map.put (key, sval);
  return sval;
}

/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned".  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))"
	     => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    tree inner_type = arg->get_type ();
	    if (TYPE_SIZE (type)
		&& TYPE_SIZE (inner_type)
		&& (fold_binary (LE_EXPR, boolean_type_node,
				 TYPE_SIZE (type), TYPE_SIZE (inner_type))
		    == boolean_true_node))
	      return maybe_fold_unaryop (type, op, innermost_arg);
	  }
	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      {
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);

	/* fold_unary can return casts of constants; try to handle them.  */
	if (op != NOP_EXPR
	    && type
	    && TREE_CODE (result) == NOP_EXPR
	    && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	  {
	    const svalue *inner_cst
	      = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	    return get_or_create_cast (type,
				       get_or_create_cast (TREE_TYPE (result),
							   inner_cst));
	  }
      }

  return NULL;
}

/* Return the svalue * for a unary operation OP on ARG with a result of
   type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
					     const svalue *arg)
{
  if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
    return folded;
  unaryop_svalue::key_t key (type, op, arg);
  if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
    return *slot;
  unaryop_svalue *unaryop_sval = new unaryop_svalue (type, op, arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
  m_unaryop_values_map.put (key, unaryop_sval);
  return unaryop_sval;
}

/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
   Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
   of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
   and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
   on.  */

static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
  gcc_assert (dst_type);
  if (!src_type)
    return NOP_EXPR;

  if (TREE_CODE (src_type) == REAL_TYPE)
    {
      if (TREE_CODE (dst_type) == INTEGER_TYPE)
	return FIX_TRUNC_EXPR;
      else
	return VIEW_CONVERT_EXPR;
    }

  return NOP_EXPR;
}

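/* For example, with the rules above, a cast from "double" to "int" is
   represented with FIX_TRUNC_EXPR, "double" to "float" with
   VIEW_CONVERT_EXPR, and "int" to "long" with NOP_EXPR.  */
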
/* Return the svalue * for a cast of ARG to type TYPE, creating it
   if necessary.  */

const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
  gcc_assert (type);

  /* No-op if the types are the same.  */
  if (type == arg->get_type ())
    return arg;

  /* Don't attempt to handle casts involving vector types for now.  */
  if (TREE_CODE (type) == VECTOR_TYPE
      || (arg->get_type ()
	  && TREE_CODE (arg->get_type ()) == VECTOR_TYPE))
    return get_or_create_unknown_svalue (type);

  enum tree_code op = get_code_for_cast (type, arg->get_type ());
  return get_or_create_unaryop (type, op, arg);
}

/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return NULL.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  if (type != unsigned_char_type_node)
    return NULL;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return NULL;

  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return NULL;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}

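/* For example (an illustrative sketch): given
     struct st { int b : 3; };
   optimize_bit_field_compare can lower "s.b == 1" to something like
     ((unsigned char)s & 7) == 1
   and the function above recovers the value bound to the bitfield's bits
   from the compound_svalue, shifts it back into place, and reapplies
   the mask.  */
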
/* Subroutine of region_model_manager::get_or_create_binop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
					const svalue *arg0,
					const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (tree result = fold_binary (op, type, cst0, cst1))
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);
    }

  if (FLOAT_TYPE_P (type)
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1) && type == arg0->get_type ())
	return arg0;
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1) && type == arg0->get_type ())
	return arg0;
      break;
    case MULT_EXPR:
      /* (VAL * 0) -> 0.  */
      if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
	return get_or_create_constant_svalue (build_int_cst (type, 0));
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
	return arg0;
      break;
    case BIT_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 & 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));

	  if (const compound_svalue *compound_sval
	      = arg0->dyn_cast_compound_svalue ())
	    if (const svalue *sval
		= maybe_undo_optimize_bit_field_compare (type,
							 compound_sval,
							 cst1, arg1))
	      return sval;
	}
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then... */
	  /* ..."(1 & x) -> x".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x & 1) -> x".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_cast (type, arg0);
	  /* ..."(0 & x) -> 0".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_int_cst (type, 0);
	  /* ..."(x & 0) -> 0".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_int_cst (type, 0);
	}
      break;
    case BIT_IOR_EXPR:
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then... */
	  /* ..."(1 | x) -> 1".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(x | 1) -> 1".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(0 | x) -> x".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x | 0) -> x".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 && 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));
	  else
	    /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
	{
	  if (zerop (cst1))
	    /* "(ARG0 || 0)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	  else
	    /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
	    return get_or_create_cast (type, arg1);
	}
      break;
    }

  /* For associative ops, fold "(X op CST_A) op CST_B" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
	  && binop->get_arg1 ()->maybe_get_constant ()
	  && type == binop->get_type ()
	  && type == binop->get_arg0 ()->get_type ()
	  && type == binop->get_arg1 ()->get_type ())
	return get_or_create_binop
	  (type, op, binop->get_arg0 (),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));
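  /* For example, the fold above rewrites "(x + 1) + 2" as
     "x + (1 + 2)"; the inner get_or_create_binop call then
     constant-folds its arguments, giving "x + 3".  */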

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "(PTR ptr+ CST_A) ptr+ CST_B" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
	if (binop->get_arg1 ()->maybe_get_constant ())
	  return get_or_create_binop
	    (type, op, binop->get_arg0 (),
	     get_or_create_binop (size_type_node, op,
				  binop->get_arg1 (), arg1));

  /* etc.  */

  return NULL;
}

/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     them via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval = new binop_svalue (type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}

/* Subroutine of region_model_manager::get_or_create_sub_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
      = parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      {
	/* If we have a concrete 1-byte access within the parent region... */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the STRING_CST.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		= maybe_get_char_from_string_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  if (const initial_svalue *init_sval
      = parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r).FIELD) -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r)[ELEMENT]) -> INIT(r[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  if (const repeated_svalue *repeated_sval
      = parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  return NULL;
}

/* Return the svalue * for extracting a subvalue of type TYPE from
   PARENT_SVALUE based on SUBREGION, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  if (const svalue *folded
	= maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (type, parent_svalue, subregion);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}

/* Subroutine of region_model_manager::get_or_create_repeated_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
    {
      HOST_WIDE_INT num_bytes_inner_svalue
	= int_size_in_bytes (inner_svalue->get_type ());
      if (num_bytes_inner_svalue != -1)
	if (num_bytes_inner_svalue
	    == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Handle zero-fill of a specific type.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  return NULL;
}

/* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
   enough times to be of size OUTER_SIZE, creating it if necessary.
   e.g. for filling buffers with a constant value.  */

const svalue *
region_model_manager::get_or_create_repeated_svalue (tree type,
						     const svalue *outer_size,
						     const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
    return folded;

  repeated_svalue::key_t key (type, outer_size, inner_svalue);
  if (repeated_svalue **slot = m_repeated_values_map.get (key))
    return *slot;
  repeated_svalue *repeated_sval
    = new repeated_svalue (type, outer_size, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
  m_repeated_values_map.put (key, repeated_sval);
  return repeated_sval;
}

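/* For example (illustrative): a call such as "memset (buf, 'x', 1024)"
   can be modeled by binding a repeated_svalue to BUF, with inner svalue
   'x' and outer size 1024, rather than binding 1024 individual char
   values.  */
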
/* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_bit_range_for_field (tree field, bit_range *out)
{
  bit_size_t bit_size;
  if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
    return false;
  int field_bit_offset = int_bit_position (field);
  *out = bit_range (field_bit_offset, bit_size);
  return true;
}

/* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_byte_range_for_field (tree field, byte_range *out)
{
  bit_range field_bits (0, 0);
  if (!get_bit_range_for_field (field, &field_bits))
    return false;
  return field_bits.as_byte_range (out);
}

/* Attempt to determine if there is a specific field within RECORD_TYPE
   at BYTES.  If so, return it, and write the location of BYTES relative
   to the field to *OUT_RANGE_WITHIN_FIELD.
   Otherwise, return NULL_TREE.
   For example, given:
     struct foo { uint32 a; uint32 b; };
   and
     bytes = {bytes 6-7} (of foo)
   we have bytes 2-3 of field b.  */

static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  byte_range field_bytes (0,0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within field_bytes?  */
  byte_range bytes_within_field (0,0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  *out_range_within_field = bytes_within_field;
  return field;
}

/* Subroutine of region_model_manager::get_or_create_bits_within.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN ((0, sizeof (VAL)), VAL)
     to:
       CAST(TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding.  */
  if (const svalue *sval
      = inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  byte_range bytes (0,0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if range is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
		      = inner_svalue->dyn_cast_initial_svalue ())
		    {
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if range is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		  = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  return NULL;
}

/* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_bits_within (tree type,
						 const bit_range &bits,
						 const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_bits_within_svalue (type, bits, inner_svalue))
    return folded;

  bits_within_svalue::key_t key (type, bits, inner_svalue);
  if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
    return *slot;
  bits_within_svalue *bits_within_sval
    = new bits_within_svalue (type, bits, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
  m_bits_within_values_map.put (key, bits_within_sval);
  return bits_within_sval;
}

/* Return the svalue * that decorates ARG as being unmergeable,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unmergeable (const svalue *arg)
{
  if (arg->get_kind () == SK_UNMERGEABLE)
    return arg;

  if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
    return *slot;
  unmergeable_svalue *unmergeable_sval = new unmergeable_svalue (arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
  m_unmergeable_values_map.put (arg, unmergeable_sval);
  return unmergeable_sval;
}

/* Return the svalue * of type TYPE for the merger of value BASE_SVAL
   and ITER_SVAL at POINT, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_widening_svalue (tree type,
						     const program_point &point,
						     const svalue *base_sval,
						     const svalue *iter_sval)
{
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  widening_svalue::key_t key (type, point, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (type, point, base_sval, iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}

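/* For example (illustrative): given a loop such as
     for (i = 0; i < n; i++)
   the analyzer can summarize the successive values of "i" at the head
   of the loop as a widening_svalue with base value 0 and iterated
   value "i + 1", rather than tracking each iteration separately.  */
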
/* Return the svalue * of type TYPE for the compound values in MAP,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const binding_map &map)
{
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}

/* class conjured_purge.  */

/* Purge state relating to SVAL.  */

void
conjured_purge::purge (const conjured_svalue *sval) const
{
  m_model->purge_state_involving (sval, m_ctxt);
}

/* Return the svalue * of type TYPE for the value conjured for ID_REG
   at STMT, creating it if necessary.
   Use P to purge existing state from the svalue, for the case where a
   conjured_svalue would be reused along an execution path.  */

const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg,
						     const conjured_purge &p)
{
  conjured_svalue::key_t key (type, stmt, id_reg);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    {
      const conjured_svalue *sval = *slot;
      /* We're reusing an existing conjured_svalue, perhaps from a different
	 state within this analysis, or perhaps from an earlier state on this
	 execution path.  For the latter, purge any state involving the "new"
	 svalue from the current program_state.  */
      p.purge (sval);
      return sval;
    }
  conjured_svalue *conjured_sval
    = new conjured_svalue (type, stmt, id_reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}

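/* For example (illustrative): the result of a call to an unknown
   function, such as "n = foo ();", can be modeled as a conjured_svalue
   keyed by the call statement.  If that statement is re-executed, e.g.
   within a loop, the same conjured_svalue is reused, so any state bound
   to it from the previous execution must first be purged via P.  */
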
/* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::
maybe_fold_asm_output_svalue (tree type,
			      const vec<const svalue *> &inputs)
{
  /* Unknown inputs should lead to unknown results.  */
  for (const auto &iter : inputs)
    if (iter->get_kind () == SK_UNKNOWN)
      return get_or_create_unknown_svalue (type);

  return NULL;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
   asm stmt ASM_STMT, given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (type, asm_string, output_idx, noutputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for the result of a call to FNDECL
   with __attribute__((const)), given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_const_fn_result_svalue (tree type,
				      tree fndecl,
				      const vec<const svalue *> &inputs)
{
  gcc_assert (type);
  gcc_assert (fndecl);
  gcc_assert (DECL_P (fndecl));
  gcc_assert (TREE_READONLY (fndecl));
  gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);

  const_fn_result_svalue::key_t key (type, fndecl, inputs);
  if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
    return *slot;
  const_fn_result_svalue *const_fn_result_sval
    = new const_fn_result_svalue (type, fndecl, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
  m_const_fn_result_values_map.put (key, const_fn_result_sval);
  return const_fn_result_sval;
}

/* Given STRING_CST, a STRING_CST, and BYTE_OFFSET_CST, a constant,
   attempt to get the character at that offset, returning either
   the svalue for the character constant, or NULL if unsuccessful.  */

const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && compare_tree_int (byte_offset_cst,
			   TREE_STRING_LENGTH (string_cst)) < 0
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
			      (TREE_STRING_POINTER (string_cst)
			       [TREE_INT_CST_LOW (byte_offset_cst)]));
      return get_or_create_constant_svalue (char_cst);
    }
  return NULL;
}

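/* For example, given string_cst "foo" and byte_offset_cst 1, the
   function above returns the constant svalue for 'o'.  */
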
/* region consolidation.  */

/* Return the region for FNDECL, creating it if necessary.  */

const function_region *
region_model_manager::get_region_for_fndecl (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);

  function_region **slot = m_fndecls_map.get (fndecl);
  if (slot)
    return *slot;
  function_region *reg
    = new function_region (alloc_region_id (), &m_code_region, fndecl);
  m_fndecls_map.put (fndecl, reg);
  return reg;
}

/* Return the region for LABEL, creating it if necessary.  */

const label_region *
region_model_manager::get_region_for_label (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  label_region **slot = m_labels_map.get (label);
  if (slot)
    return *slot;

  tree fndecl = DECL_CONTEXT (label);
  gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);

  const function_region *func_reg = get_region_for_fndecl (fndecl);
  label_region *reg
    = new label_region (alloc_region_id (), func_reg, label);
  m_labels_map.put (label, reg);
  return reg;
}

/* Return the region for EXPR, creating it if necessary.  */

const decl_region *
region_model_manager::get_region_for_global (tree expr)
{
  gcc_assert (TREE_CODE (expr) == VAR_DECL);

  decl_region **slot = m_globals_map.get (expr);
  if (slot)
    return *slot;
  decl_region *reg
    = new decl_region (alloc_region_id (), &m_globals_region, expr);
  m_globals_map.put (expr, reg);
  return reg;
}

/* Return the region for an unknown access of type REGION_TYPE,
   creating it if necessary.
   This is a symbolic_region, where the pointer is an unknown_svalue
   of type &REGION_TYPE.  */

const region *
region_model_manager::get_unknown_symbolic_region (tree region_type)
{
  tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
  const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
  return get_symbolic_region (unknown_ptr);
}

/* Return the region that describes accessing field FIELD of PARENT,
   creating it if necessary.  */

const region *
region_model_manager::get_field_region (const region *parent, tree field)
{
  gcc_assert (TREE_CODE (field) == FIELD_DECL);

  /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (TREE_TYPE (field));

  field_region::key_t key (parent, field);
  if (field_region *reg = m_field_regions.get (key))
    return reg;

  field_region *field_reg
    = new field_region (alloc_region_id (), parent, field);
  m_field_regions.put (key, field_reg);
  return field_reg;
}

/* Return the region that describes accessing the element of type
   ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary.  */

const region *
region_model_manager::get_element_region (const region *parent,
					  tree element_type,
					  const svalue *index)
{
  /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (element_type);

  element_region::key_t key (parent, element_type, index);
  if (element_region *reg = m_element_regions.get (key))
    return reg;

  element_region *element_reg
    = new element_region (alloc_region_id (), parent, element_type, index);
  m_element_regions.put (key, element_reg);
  return element_reg;
}

/* Return the region that describes accessing the subregion of type
   TYPE at offset BYTE_OFFSET within PARENT, creating it if
   necessary.  */

const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* If BYTE_OFFSET is zero, return PARENT (viewed as TYPE).  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to OFFSET_REGION(REG, (X + Y)).  */
  if (const offset_region *parent_offset_reg
      = parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_region_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}

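/* For example (illustrative): with the fold above, accessing
   "*(p + i + j)" for symbolic byte offsets I and J can be modeled with
   a single offset_region at offset "i + j", rather than with an
   offset_region nested inside another offset_region.  */
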
/* Return the region that describes accessing the subregion of type
   TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary.  */

const region *
region_model_manager::get_sized_region (const region *parent,
					tree type,
					const svalue *byte_size_sval)
{
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  if (byte_size_sval->get_type () != size_type_node)
    byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);

  /* If PARENT is already that size, return it.  */
  const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
  if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
    if (tree size_cst = byte_size_sval->maybe_get_constant ())
      {
	tree comparison
	  = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
	if (comparison == boolean_true_node)
	  return parent;
      }

  sized_region::key_t key (parent, type, byte_size_sval);
  if (sized_region *reg = m_sized_regions.get (key))
    return reg;

  sized_region *sized_reg
    = new sized_region (alloc_region_id (), parent, type, byte_size_sval);
  m_sized_regions.put (key, sized_reg);
  return sized_reg;
}

/* Return the region that describes accessing ORIGINAL_REGION as if
   it were of type TYPE, creating it if necessary.  */

const region *
region_model_manager::get_cast_region (const region *original_region,
				       tree type)
{
  /* If types match, return ORIGINAL_REGION.  */
  if (type == original_region->get_type ())
    return original_region;

  if (original_region->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  cast_region::key_t key (original_region, type);
  if (cast_region *reg = m_cast_regions.get (key))
    return reg;

  cast_region *cast_reg
    = new cast_region (alloc_region_id (), original_region, type);
  m_cast_regions.put (key, cast_reg);
  return cast_reg;
}

/* Return the frame_region for call to FUN from CALLING_FRAME, creating it
   if necessary.  CALLING_FRAME may be NULL.  */

const frame_region *
region_model_manager::get_frame_region (const frame_region *calling_frame,
					function *fun)
{
  int index = calling_frame ? calling_frame->get_index () + 1 : 0;

  frame_region::key_t key (calling_frame, fun);
  if (frame_region *reg = m_frame_regions.get (key))
    return reg;

  frame_region *frame_reg
    = new frame_region (alloc_region_id (), &m_stack_region, calling_frame,
			fun, index);
  m_frame_regions.put (key, frame_reg);
  return frame_reg;
}

/* Return the region that describes dereferencing SVAL, creating it
   if necessary.  */

const region *
region_model_manager::get_symbolic_region (const svalue *sval)
{
  symbolic_region::key_t key (&m_root_region, sval);
  if (symbolic_region *reg = m_symbolic_regions.get (key))
    return reg;

  symbolic_region *symbolic_reg
    = new symbolic_region (alloc_region_id (), &m_root_region, sval);
  m_symbolic_regions.put (key, symbolic_reg);
  return symbolic_reg;
}

/* Return the region that describes accessing STRING_CST, creating it
   if necessary.  */

const string_region *
region_model_manager::get_region_for_string (tree string_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  string_region **slot = m_string_map.get (string_cst);
  if (slot)
    return *slot;
  string_region *reg
    = new string_region (alloc_region_id (), &m_root_region, string_cst);
  m_string_map.put (string_cst, reg);
  return reg;
}

/* Return the region that describes accessing BITS within PARENT as TYPE,
   creating it if necessary.  */

const region *
region_model_manager::get_bit_range (const region *parent, tree type,
				     const bit_range &bits)
{
  gcc_assert (parent);

  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  bit_range_region::key_t key (parent, type, bits);
  if (bit_range_region *reg = m_bit_range_regions.get (key))
    return reg;

  bit_range_region *bit_range_reg
    = new bit_range_region (alloc_region_id (), parent, type, bits);
  m_bit_range_regions.put (key, bit_range_reg);
  return bit_range_reg;
}

/* If we see a tree code we don't know how to handle, rather than
   ICE or generate bogus results, create a dummy region, and notify
   CTXT so that it can mark the new state as being not properly
   modelled.  The exploded graph can then stop exploring that path,
   since any diagnostics we might issue will have questionable
   validity.  */

const region *
region_model_manager::
get_region_for_unexpected_tree_code (region_model_context *ctxt,
				     tree t,
				     const dump_location_t &loc)
{
  tree type = TYPE_P (t) ? t : TREE_TYPE (t);
  region *new_reg
    = new unknown_region (alloc_region_id (), &m_root_region, type);
  if (ctxt)
    ctxt->on_unexpected_tree_code (t, loc);
  return new_reg;
}

/* Return a new region describing a heap-allocated block of memory.  */

const region *
region_model_manager::create_region_for_heap_alloc ()
{
  region *reg
    = new heap_allocated_region (alloc_region_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Return a new region describing a block of memory allocated within FRAME.  */

const region *
region_model_manager::create_region_for_alloca (const frame_region *frame)
{
  gcc_assert (frame);
  region *reg = new alloca_region (alloc_region_id (), frame);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Log OBJ to LOGGER.  */

template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}

/* Specialization for frame_region, which also logs the count of locals
   managed by the frame_region.  */

template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}

/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  logger->log ("  # %s: %li", title, (long)uniq_map.elements ());
  if (!show_objs)
    return;
  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects that were managed by MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const consolidation_map<T> &map)
{
  logger->log ("  # %s: %li", title, (long)map.elements ());
  if (!show_objs)
    return;

  auto_vec<const T *> vec_objs (map.elements ());
  for (typename consolidation_map<T>::iterator iter = map.begin ();
       iter != map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  logger->log ("region consolidation");
  logger->log ("  next region id: %i", m_next_region_id);
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  logger->log ("  # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}

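/* With a logger attached, the output of the above has roughly this
   shape (a sketch of the layout implied by the format strings; the
   counts here are hypothetical):

     svalue consolidation
       # constant_svalue: 12
       # unknown_svalue: 3
       ...
     region consolidation
       next region id: 27
       # function_region: 4
       ...

   which can help when reading analyzer logs.  */
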
/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}

/* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
   (using -fdump-analyzer-untracked).  */

static void
dump_untracked_region (const decl_region *decl_reg)
{
  tree decl = decl_reg->get_decl ();
  if (TREE_CODE (decl) != VAR_DECL)
    return;
  /* For now, don't emit the status of decls in the constant pool, to avoid
     differences in DejaGnu test results between targets that use these vs
     those that don't.
     (Eventually these decls should probably be untracked and we should test
     for that, but that's not stage 4 material).  */
  if (DECL_IN_CONSTANT_POOL (decl))
    return;
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
	      "track %qD: %s",
	      decl, (decl_reg->tracked_p () ? "yes" : "no"));
}

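/* In DejaGnu tests this is driven by compiling with
   -fdump-analyzer-untracked and matching the "track" warnings emitted
   above, along the lines of (an illustrative sketch, not an actual
   testcase):

     static int x;	// { dg-warning "track 'x': yes" }

   i.e. each eligible VAR_DECL gets a line reporting whether the
   analyzer tracks it.  */
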
/* Implementation of -fdump-analyzer-untracked.  */

void
region_model_manager::dump_untracked_regions () const
{
  for (auto iter : m_globals_map)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
  for (auto frame_iter : m_frame_regions)
    {
      const frame_region *frame_reg = frame_iter.second;
      frame_reg->dump_untracked_regions ();
    }
}

void
frame_region::dump_untracked_regions () const
{
  for (auto iter : m_locals)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */