/* Consolidation of svalues and regions.
   Copyright (C) 2020-2022 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "bitmap.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "options.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"

#if ENABLE_ANALYZER

namespace ana {

/* class region_model_manager.  */

/* region_model_manager's ctor.  */

region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_empty_call_string (),
  m_next_region_id (0),
  m_root_region (alloc_region_id ()),
  m_stack_region (alloc_region_id (), &m_root_region),
  m_heap_region (alloc_region_id (), &m_root_region),
  m_unknown_NULL (NULL),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_region_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_region_id (), &m_root_region),
  m_globals_map (),
  m_thread_local_region (alloc_region_id (), &m_root_region),
  m_errno_region (alloc_region_id (), &m_thread_local_region),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}

/* region_model_manager's dtor.  Delete all of the managed svalues
   and regions.  */

region_model_manager::~region_model_manager ()
{
  /* Delete consolidated svalues.  */
  for (constants_map_t::iterator iter = m_constants_map.begin ();
       iter != m_constants_map.end (); ++iter)
    delete (*iter).second;
  for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
       iter != m_unknowns_map.end (); ++iter)
    delete (*iter).second;
  delete m_unknown_NULL;
  for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
       iter != m_poisoned_values_map.end (); ++iter)
    delete (*iter).second;
  for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
       iter != m_setjmp_values_map.end (); ++iter)
    delete (*iter).second;
  for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
       iter != m_initial_values_map.end (); ++iter)
    delete (*iter).second;
  for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
       iter != m_pointer_values_map.end (); ++iter)
    delete (*iter).second;
  for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
       iter != m_unaryop_values_map.end (); ++iter)
    delete (*iter).second;
  for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
       iter != m_binop_values_map.end (); ++iter)
    delete (*iter).second;
  for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
       iter != m_sub_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_repeated_values_map)
    delete iter.second;
  for (auto iter : m_bits_within_values_map)
    delete iter.second;
  for (unmergeable_values_map_t::iterator iter
	 = m_unmergeable_values_map.begin ();
       iter != m_unmergeable_values_map.end (); ++iter)
    delete (*iter).second;
  for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
       iter != m_widening_values_map.end (); ++iter)
    delete (*iter).second;
  for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
       iter != m_compound_values_map.end (); ++iter)
    delete (*iter).second;
  for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
       iter != m_conjured_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_asm_output_values_map)
    delete iter.second;
  for (auto iter : m_const_fn_result_values_map)
    delete iter.second;

  /* Delete consolidated regions.  */
  for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
       iter != m_fndecls_map.end (); ++iter)
    delete (*iter).second;
  for (labels_map_t::iterator iter = m_labels_map.begin ();
       iter != m_labels_map.end (); ++iter)
    delete (*iter).second;
  for (globals_map_t::iterator iter = m_globals_map.begin ();
       iter != m_globals_map.end (); ++iter)
    delete (*iter).second;
  for (string_map_t::iterator iter = m_string_map.begin ();
       iter != m_string_map.end (); ++iter)
    delete (*iter).second;

  delete m_range_mgr;
}

/* Return true if C exceeds the complexity limit for svalues.  */

bool
region_model_manager::too_complex_p (const complexity &c) const
{
  if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
    return true;
  return false;
}

/* If SVAL exceeds the complexity limit for svalues, delete it
   and return true.
   Otherwise update m_max_complexity and return false.  */

bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  if (m_checking_feasibility)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  delete sval;
  return true;
}

/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL)			\
  do {								\
    svalue *sval_ = (SVAL);					\
    tree type_ = sval_->get_type ();				\
    if (reject_if_too_complex (sval_))				\
      return get_or_create_unknown_svalue (type_);		\
  } while (0)

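/* Usage sketch (illustrative, not from this file): the consolidating
   getters below all follow the same pattern, allocating a candidate
   svalue and then guarding it with the macro before caching it:

     foo_svalue *sval = new foo_svalue (type, ...);
     RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
     m_foo_values_map.put (key, sval);
     return sval;

   where "foo_svalue" and "m_foo_values_map" are placeholder names.
   If the candidate exceeds the complexity limit, the macro deletes it
   and makes the enclosing getter return an unknown svalue instead.  */
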
/* svalue consolidation.  */

/* Return the svalue * for a constant_svalue for CST_EXPR,
   creating it if necessary.
   The constant_svalue instances are reused, based on pointer equality
   of trees.  */

const svalue *
region_model_manager::get_or_create_constant_svalue (tree cst_expr)
{
  gcc_assert (cst_expr);
  gcc_assert (CONSTANT_CLASS_P (cst_expr));

  constant_svalue **slot = m_constants_map.get (cst_expr);
  if (slot)
    return *slot;
  constant_svalue *cst_sval = new constant_svalue (cst_expr);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
  m_constants_map.put (cst_expr, cst_sval);
  return cst_sval;
}

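/* Usage sketch (illustrative, not part of this file): consolidation
   means repeated requests for the same tree yield pointer-equal
   svalues, so clients can compare svalue pointers directly:

     tree cst = build_int_cst (integer_type_node, 42);
     const svalue *a = mgr->get_or_create_constant_svalue (cst);
     const svalue *b = mgr->get_or_create_constant_svalue (cst);
     gcc_assert (a == b);

   where "mgr" is assumed to be a region_model_manager *.  */
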
/* Return the svalue * for a constant_svalue for the INTEGER_CST
   for VAL of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_int_cst (tree type, poly_int64 val)
{
  gcc_assert (type);
  tree tree_cst = build_int_cst (type, val);
  return get_or_create_constant_svalue (tree_cst);
}

/* Return the svalue * for the constant_svalue for the NULL pointer
   of POINTER_TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_null_ptr (tree pointer_type)
{
  gcc_assert (pointer_type);
  gcc_assert (POINTER_TYPE_P (pointer_type));
  return get_or_create_int_cst (pointer_type, 0);
}

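/* Usage sketch (illustrative, not part of this file): modeling a
   comparison against NULL might build the zero pointer like so:

     tree char_ptr_type = build_pointer_type (char_type_node);
     const svalue *null_sval = mgr->get_or_create_null_ptr (char_ptr_type);

   which is simply the consolidated constant 0 of that pointer type;
   "mgr" is assumed to be a region_model_manager *.  */
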
/* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
   creating it if necessary.
   The unknown_svalue instances are reused, based on pointer equality
   of the types.  */

const svalue *
region_model_manager::get_or_create_unknown_svalue (tree type)
{
  /* Don't create unknown values when doing feasibility testing;
     instead, create a unique svalue.  */
  if (m_checking_feasibility)
    return create_unique_svalue (type);

  /* Special-case NULL, so that the hash_map can use NULL as the
     "empty" value.  */
  if (type == NULL_TREE)
    {
      if (!m_unknown_NULL)
	m_unknown_NULL = new unknown_svalue (type);
      return m_unknown_NULL;
    }

  unknown_svalue **slot = m_unknowns_map.get (type);
  if (slot)
    return *slot;
  unknown_svalue *sval = new unknown_svalue (type);
  m_unknowns_map.put (type, sval);
  return sval;
}

/* Return a freshly-allocated svalue of TYPE, owned by this manager.  */

const svalue *
region_model_manager::create_unique_svalue (tree type)
{
  svalue *sval = new placeholder_svalue (type, "unique");
  m_managed_dynamic_svalues.safe_push (sval);
  return sval;
}

/* Return the svalue * for the initial value of REG, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_initial_value (const region *reg)
{
  if (!reg->can_have_initial_svalue_p ())
    return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
					  reg->get_type ());

  /* The initial value of a cast is a cast of the initial value.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    {
      const region *original_reg = cast_reg->get_original_region ();
      return get_or_create_cast (cast_reg->get_type (),
				 get_or_create_initial_value (original_reg));
    }

  /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL.  */
  if (reg->symbolic_for_unknown_ptr_p ())
    return get_or_create_unknown_svalue (reg->get_type ());

  if (initial_svalue **slot = m_initial_values_map.get (reg))
    return *slot;
  initial_svalue *initial_sval = new initial_svalue (reg->get_type (), reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
  m_initial_values_map.put (reg, initial_sval);
  return initial_sval;
}

/* Return the svalue * for R using type TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
						   tree type)
{
  setjmp_svalue::key_t key (r, type);
  if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
    return *slot;
  setjmp_svalue *setjmp_sval = new setjmp_svalue (r, type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
  m_setjmp_values_map.put (key, setjmp_sval);
  return setjmp_sval;
}

/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
						     tree type)
{
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval = new poisoned_svalue (kind, type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}

/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
{
  /* If this is a symbolic region from dereferencing a pointer, and the types
     match, then return the original pointer.  */
  if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
    if (ptr_type == sym_reg->get_pointer ()->get_type ())
      return sym_reg->get_pointer ();

  region_svalue::key_t key (ptr_type, pointee);
  if (region_svalue **slot = m_pointer_values_map.get (key))
    return *slot;
  region_svalue *sval = new region_svalue (ptr_type, pointee);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
  m_pointer_values_map.put (key, sval);
  return sval;
}

/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned".  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))"
	   => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    tree inner_type = arg->get_type ();
	    if (TYPE_SIZE (type)
		&& TYPE_SIZE (inner_type)
		&& (fold_binary (LE_EXPR, boolean_type_node,
				 TYPE_SIZE (type), TYPE_SIZE (inner_type))
		    == boolean_true_node))
	      return maybe_fold_unaryop (type, op, innermost_arg);
	  }

	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    case NEGATE_EXPR:
      {
	/* -(-(VAL)) is VAL, for integer types.  */
	if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
	  if (unaryop->get_op () == NEGATE_EXPR
	      && type == unaryop->get_type ()
	      && type
	      && INTEGRAL_TYPE_P (type))
	    return unaryop->get_arg ();
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      {
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);

	/* fold_unary can return casts of constants; try to handle them.  */
	if (op != NOP_EXPR
	    && type
	    && TREE_CODE (result) == NOP_EXPR
	    && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	  {
	    const svalue *inner_cst
	      = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	    return get_or_create_cast (type,
				       get_or_create_cast (TREE_TYPE (result),
							   inner_cst));
	  }
      }

  return NULL;
}

/* Return the svalue * for a unary operation OP on ARG with a result of
   type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
					     const svalue *arg)
{
  if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
    return folded;
  unaryop_svalue::key_t key (type, op, arg);
  if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
    return *slot;
  unaryop_svalue *unaryop_sval = new unaryop_svalue (type, op, arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
  m_unaryop_values_map.put (key, unaryop_sval);
  return unaryop_sval;
}

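/* Folding sketch (illustrative, not part of this file): given an
   existing binop svalue for "x == y", requesting TRUTH_NOT_EXPR on it
   yields the inverted comparison rather than a new unaryop node:

     const svalue *eq = mgr->get_or_create_binop (boolean_type_node,
						  EQ_EXPR, x_sval, y_sval);
     const svalue *ne = mgr->get_or_create_unaryop (boolean_type_node,
						    TRUTH_NOT_EXPR, eq);

   Here "ne" is the binop svalue for "x != y", via maybe_fold_unaryop;
   "mgr", "x_sval" and "y_sval" are assumed to be in scope.  */
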
/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
   Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
   of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
   and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
   on.  */

static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
  gcc_assert (dst_type);
  if (!src_type)
    return NOP_EXPR;

  if (TREE_CODE (src_type) == REAL_TYPE)
    {
      if (TREE_CODE (dst_type) == INTEGER_TYPE)
	return FIX_TRUNC_EXPR;
      else
	return VIEW_CONVERT_EXPR;
    }

  return NOP_EXPR;
}

/* Return the svalue * for a cast of ARG to type TYPE, creating it
   if necessary.  */

const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
  gcc_assert (type);

  /* No-op if the types are the same.  */
  if (type == arg->get_type ())
    return arg;

  /* Don't attempt to handle casts involving vector types for now.  */
  if (TREE_CODE (type) == VECTOR_TYPE
      || (arg->get_type ()
	  && TREE_CODE (arg->get_type ()) == VECTOR_TYPE))
    return get_or_create_unknown_svalue (type);

  enum tree_code op = get_code_for_cast (type, arg->get_type ());
  return get_or_create_unaryop (type, op, arg);
}

/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return NULL.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  if (type != unsigned_char_type_node)
    return NULL;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return NULL;

  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return NULL;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}

/* Subroutine of region_model_manager::get_or_create_binop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
					const svalue *arg0,
					const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (tree result = fold_binary (op, type, cst0, cst1))
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);
    }

  if (FLOAT_TYPE_P (type)
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      /* (0 - VAL) -> -VAL.  */
      if (cst0 && zerop (cst0))
	return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
      break;
    case MULT_EXPR:
      /* (VAL * 0) -> 0.  */
      if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
	return get_or_create_constant_svalue (build_int_cst (type, 0));
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
	return arg0;
      break;
    case BIT_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 & 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));

	  if (const compound_svalue *compound_sval
		= arg0->dyn_cast_compound_svalue ())
	    if (const svalue *sval
		  = maybe_undo_optimize_bit_field_compare (type,
							   compound_sval,
							   cst1, arg1))
	      return sval;
	}
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If the operands are both _Bool, then...  */
	  /* ..."(1 & x) -> x".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x & 1) -> x".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_cast (type, arg0);
	  /* ..."(0 & x) -> 0".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_int_cst (type, 0);
	  /* ..."(x & 0) -> 0".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_int_cst (type, 0);
	}
      break;
    case BIT_IOR_EXPR:
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If the operands are both _Bool, then...  */
	  /* ..."(1 | x) -> 1".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(x | 1) -> 1".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(0 | x) -> x".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x | 0) -> x".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 && 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));
	  else
	    /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
	{
	  if (zerop (cst1))
	    /* "(ARG0 || 0)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	  else
	    /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
	    return get_or_create_cast (type, arg1);
	}
      break;
    }

  /* For associative ops, fold "((X op CST_A) op CST_B)" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
	  && binop->get_arg1 ()->maybe_get_constant ()
	  && type == binop->get_type ()
	  && type == binop->get_arg0 ()->get_type ()
	  && type == binop->get_arg1 ()->get_type ())
	return get_or_create_binop
	  (type, op, binop->get_arg0 (),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "((PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
	if (binop->get_arg1 ()->maybe_get_constant ())
	  return get_or_create_binop
	    (type, op, binop->get_arg0 (),
	     get_or_create_binop (size_type_node, op,
				  binop->get_arg1 (), arg1));

  /* etc.  */

  return NULL;
}

/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     it via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval = new binop_svalue (type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}

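/* Folding sketch (illustrative, not part of this file): constants are
   canonicalized to the RHS and identities are folded away, so e.g.
   "0 + v" comes back as "v" itself:

     const svalue *zero = mgr->get_or_create_int_cst (integer_type_node, 0);
     const svalue *sum = mgr->get_or_create_binop (integer_type_node,
						   PLUS_EXPR, zero, v_sval);
     gcc_assert (sum == v_sval);

   assuming "mgr" and "v_sval" (an svalue of type int) are in scope.  */
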
/* Subroutine of region_model_manager::get_or_create_sub_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
	= parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      {
	/* If we have a concrete 1-byte access within the parent region...  */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the STRING_CST.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		  = maybe_get_char_from_string_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  if (const initial_svalue *init_sval
	= parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	   -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r)[ELEMENT]) -> INIT(r[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	   -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  if (const repeated_svalue *repeated_sval
	= parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  return NULL;
}

/* Return the svalue * for extracting a subvalue of type TYPE from
   PARENT_SVALUE based on SUBREGION, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  if (const svalue *folded
	= maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (type, parent_svalue, subregion);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}

/* Subroutine of region_model_manager::get_or_create_repeated_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
    {
      HOST_WIDE_INT num_bytes_inner_svalue
	= int_size_in_bytes (inner_svalue->get_type ());
      if (num_bytes_inner_svalue != -1)
	if (num_bytes_inner_svalue
	      == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Handle zero-fill of a specific type.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  return NULL;
}

/* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
   enough times to be of size OUTER_SIZE, creating it if necessary.
   e.g. for filling buffers with a constant value.  */

const svalue *
region_model_manager::get_or_create_repeated_svalue (tree type,
						     const svalue *outer_size,
						     const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
    return folded;

  repeated_svalue::key_t key (type, outer_size, inner_svalue);
  if (repeated_svalue **slot = m_repeated_values_map.get (key))
    return *slot;
  repeated_svalue *repeated_sval
    = new repeated_svalue (type, outer_size, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
  m_repeated_values_map.put (key, repeated_sval);
  return repeated_sval;
}

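/* Usage sketch (illustrative, not part of this file): modeling
   "memset (buf, 0, 1024)" might bind the destination region to a
   repeated svalue of 1024 zero bytes:

     const svalue *size_sval
       = mgr->get_or_create_int_cst (size_type_node, 1024);
     const svalue *zero_byte
       = mgr->get_or_create_int_cst (unsigned_char_type_node, 0);
     const svalue *fill
       = mgr->get_or_create_repeated_svalue (NULL_TREE, size_sval,
					     zero_byte);

   assuming "mgr" is a region_model_manager *.  */
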
/* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_bit_range_for_field (tree field, bit_range *out)
{
  bit_size_t bit_size;
  if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
    return false;
  int field_bit_offset = int_bit_position (field);
  *out = bit_range (field_bit_offset, bit_size);
  return true;
}

/* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_byte_range_for_field (tree field, byte_range *out)
{
  bit_range field_bits (0, 0);
  if (!get_bit_range_for_field (field, &field_bits))
    return false;
  return field_bits.as_byte_range (out);
}

/* Attempt to determine if there is a specific field within RECORD_TYPE
   at BYTES.  If so, return it, and write the location of BYTES relative
   to the field to *OUT_RANGE_WITHIN_FIELD.
   Otherwise, return NULL_TREE.
   For example, given:
     struct foo { uint32 a; uint32 b; };
   and
     bytes = {bytes 6-7} (of foo)
   we have bytes 2-3 of field b.  */

static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  byte_range field_bytes (0, 0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within field_bytes?  */
  byte_range bytes_within_field (0, 0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  *out_range_within_field = bytes_within_field;
  return field;
}

/* Subroutine of region_model_manager::get_or_create_bits_within.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN (0, sizeof (VAL), VAL)
     to:
       CAST (TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding.  */
  if (const svalue *sval
	= inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  byte_range bytes (0, 0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if range is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
			= inner_svalue->dyn_cast_initial_svalue ())
		    {
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BYTES_WITHIN (range, KIND(REG))
	     to:
	       BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if range is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		    = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  return NULL;
}

/* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_bits_within (tree type,
						 const bit_range &bits,
						 const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_bits_within_svalue (type, bits, inner_svalue))
    return folded;

  bits_within_svalue::key_t key (type, bits, inner_svalue);
  if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
    return *slot;
  bits_within_svalue *bits_within_sval
    = new bits_within_svalue (type, bits, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
  m_bits_within_values_map.put (key, bits_within_sval);
  return bits_within_sval;
}

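/* Usage sketch (illustrative, not part of this file): extracting the
   second byte of a 32-bit value as an unsigned char:

     bit_range second_byte (BITS_PER_UNIT, BITS_PER_UNIT);
     const svalue *byte_sval
       = mgr->get_or_create_bits_within (unsigned_char_type_node,
					 second_byte, u32_sval);

   where "u32_sval" is assumed to be an svalue of some 32-bit type and
   "mgr" a region_model_manager *.  */
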
/* Return the svalue * that decorates ARG as being unmergeable,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unmergeable (const svalue *arg)
{
  if (arg->get_kind () == SK_UNMERGEABLE)
    return arg;

  if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
    return *slot;
  unmergeable_svalue *unmergeable_sval = new unmergeable_svalue (arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
  m_unmergeable_values_map.put (arg, unmergeable_sval);
  return unmergeable_sval;
}

/* Return the svalue * of type TYPE for the merger of value BASE_SVAL
   and ITER_SVAL at POINT, creating it if necessary.  */

const svalue *
region_model_manager::
get_or_create_widening_svalue (tree type,
			       const function_point &point,
			       const svalue *base_sval,
			       const svalue *iter_sval)
{
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  widening_svalue::key_t key (type, point, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (type, point, base_sval, iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}

/* Return the svalue * of type TYPE for the compound values in MAP,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const binding_map &map)
{
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}

/* class conjured_purge.  */

/* Purge state relating to SVAL.  */

void
conjured_purge::purge (const conjured_svalue *sval) const
{
  m_model->purge_state_involving (sval, m_ctxt);
}

/* Return the svalue * of type TYPE for the value conjured for ID_REG
   at STMT, creating it if necessary.
   Use P to purge existing state from the svalue, for the case where a
   conjured_svalue would be reused along an execution path.  */

const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg,
						     const conjured_purge &p)
{
  conjured_svalue::key_t key (type, stmt, id_reg);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    {
      const conjured_svalue *sval = *slot;
      /* We're reusing an existing conjured_svalue, perhaps from a different
	 state within this analysis, or perhaps from an earlier state on this
	 execution path.  For the latter, purge any state involving the "new"
	 svalue from the current program_state.  */
      p.purge (sval);
      return sval;
    }
  conjured_svalue *conjured_sval
    = new conjured_svalue (type, stmt, id_reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}

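/* Usage sketch (illustrative, not part of this file): the return value
   of a call to an unknown function at CALL_STMT might be modeled as a
   conjured svalue keyed by that stmt and the lhs region:

     const svalue *ret_sval
       = mgr->get_or_create_conjured_svalue (ret_type, call_stmt, lhs_reg,
					     conjured_purge (model, ctxt));

   Revisiting the same stmt along an execution path reuses the same
   svalue, after purging any state bound to it from the current
   program_state; "model" and "ctxt" are assumed to be in scope.  */
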
/* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::
maybe_fold_asm_output_svalue (tree type,
			      const vec<const svalue *> &inputs)
{
  /* Unknown inputs should lead to unknown results.  */
  for (const auto &iter : inputs)
    if (iter->get_kind () == SK_UNKNOWN)
      return get_or_create_unknown_svalue (type);

  return NULL;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
   asm stmt ASM_STMT, given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (type, asm_string, output_idx, noutputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
   asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
   INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const char *asm_string,
				 unsigned output_idx,
				 unsigned num_outputs,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (type, asm_string, output_idx, num_outputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for the result of a call to FNDECL
   with __attribute__((const)), given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_const_fn_result_svalue (tree type,
				      tree fndecl,
				      const vec<const svalue *> &inputs)
{
  gcc_assert (type);
  gcc_assert (fndecl);
  gcc_assert (DECL_P (fndecl));
  gcc_assert (TREE_READONLY (fndecl));
  gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);

  const_fn_result_svalue::key_t key (type, fndecl, inputs);
  if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
    return *slot;
  const_fn_result_svalue *const_fn_result_sval
    = new const_fn_result_svalue (type, fndecl, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
  m_const_fn_result_values_map.put (key, const_fn_result_sval);
  return const_fn_result_sval;
}

/* Given a STRING_CST STRING_CST and a constant BYTE_OFFSET_CST,
   attempt to get the character at that offset, returning either
   the svalue for the character constant, or NULL if unsuccessful.  */

const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && compare_tree_int (byte_offset_cst,
			   TREE_STRING_LENGTH (string_cst)) < 0
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
			      (TREE_STRING_POINTER (string_cst)
			       [TREE_INT_CST_LOW (byte_offset_cst)]));
      return get_or_create_constant_svalue (char_cst);
    }
  return NULL;
}

/* region consolidation.  */

/* Return the region for FNDECL, creating it if necessary.  */

const function_region *
region_model_manager::get_region_for_fndecl (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);

  function_region **slot = m_fndecls_map.get (fndecl);
  if (slot)
    return *slot;
  function_region *reg
    = new function_region (alloc_region_id (), &m_code_region, fndecl);
  m_fndecls_map.put (fndecl, reg);
  return reg;
}

/* Return the region for LABEL, creating it if necessary.  */

const label_region *
region_model_manager::get_region_for_label (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  label_region **slot = m_labels_map.get (label);
  if (slot)
    return *slot;

  tree fndecl = DECL_CONTEXT (label);
  gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);

  const function_region *func_reg = get_region_for_fndecl (fndecl);
  label_region *reg
    = new label_region (alloc_region_id (), func_reg, label);
  m_labels_map.put (label, reg);
  return reg;
}

/* Return the region for EXPR, creating it if necessary.  */

const decl_region *
region_model_manager::get_region_for_global (tree expr)
{
  gcc_assert (TREE_CODE (expr) == VAR_DECL);

  decl_region **slot = m_globals_map.get (expr);
  if (slot)
    return *slot;
  decl_region *reg
    = new decl_region (alloc_region_id (), &m_globals_region, expr);
  m_globals_map.put (expr, reg);
  return reg;
}

/* Return the region for an unknown access of type REGION_TYPE,
   creating it if necessary.
   This is a symbolic_region, where the pointer is an unknown_svalue
   of type &REGION_TYPE.  */

const region *
region_model_manager::get_unknown_symbolic_region (tree region_type)
{
  tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
  const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
  return get_symbolic_region (unknown_ptr);
}

808f4dfe
DM
1437/* Return the region that describes accessing field FIELD of PARENT,
1438 creating it if necessary. */
1439
1440const region *
1441region_model_manager::get_field_region (const region *parent, tree field)
1442{
00cb0f58
DM
1443 gcc_assert (TREE_CODE (field) == FIELD_DECL);
1444
11d4ec5d
DM
1445 /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE). */
1446 if (parent->symbolic_for_unknown_ptr_p ())
3d41408c 1447 return get_unknown_symbolic_region (TREE_TYPE (field));
11d4ec5d 1448
808f4dfe
DM
1449 field_region::key_t key (parent, field);
1450 if (field_region *reg = m_field_regions.get (key))
1451 return reg;
1452
1453 field_region *field_reg
1454 = new field_region (alloc_region_id (), parent, field);
1455 m_field_regions.put (key, field_reg);
1456 return field_reg;
1457}
1458
/* Return the region that describes accessing the element of type
   ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary.  */

const region *
region_model_manager::get_element_region (const region *parent,
					  tree element_type,
					  const svalue *index)
{
  /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (element_type);

  element_region::key_t key (parent, element_type, index);
  if (element_region *reg = m_element_regions.get (key))
    return reg;

  element_region *element_reg
    = new element_region (alloc_region_id (), parent, element_type, index);
  m_element_regions.put (key, element_reg);
  return element_reg;
}

/* Return the region that describes accessing the subregion of type
   TYPE at offset BYTE_OFFSET within PARENT, creating it if
   necessary.  */

const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* If BYTE_OFFSET is zero, return PARENT.  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to OFFSET_REGION(REG, (X + Y)).  */
  if (const offset_region *parent_offset_reg
	= parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_region_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}

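/* Folding sketch (illustrative, not part of this file): nested offset
   regions collapse into one, so "(p + 4) + 8" and "p + 12" should name
   the same region:

     const region *r1 = mgr->get_offset_region (base_reg, type, four_sval);
     const region *r2 = mgr->get_offset_region (r1, type, eight_sval);
     const region *r3 = mgr->get_offset_region (base_reg, type, twelve_sval);
     gcc_assert (r2 == r3);

   assuming the offset svalues are small integer constants of the same
   type, so that the folded sum consolidates to the same svalue.  */
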
/* Return the region that describes accessing the subregion of type
   TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary.  */

const region *
region_model_manager::get_sized_region (const region *parent,
					tree type,
					const svalue *byte_size_sval)
{
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  if (byte_size_sval->get_type () != size_type_node)
    byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);

  /* If PARENT is already that size, return it.  */
  const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
  if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
    if (tree size_cst = byte_size_sval->maybe_get_constant ())
      {
	tree comparison
	  = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
	if (comparison == boolean_true_node)
	  return parent;
      }

  sized_region::key_t key (parent, type, byte_size_sval);
  if (sized_region *reg = m_sized_regions.get (key))
    return reg;

  sized_region *sized_reg
    = new sized_region (alloc_region_id (), parent, type, byte_size_sval);
  m_sized_regions.put (key, sized_reg);
  return sized_reg;
}

/* Return the region that describes accessing ORIGINAL_REGION as if
   it were of type TYPE, creating it if necessary.  */

const region *
region_model_manager::get_cast_region (const region *original_region,
				       tree type)
{
  /* If types match, return ORIGINAL_REGION.  */
  if (type == original_region->get_type ())
    return original_region;

  if (original_region->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  cast_region::key_t key (original_region, type);
  if (cast_region *reg = m_cast_regions.get (key))
    return reg;

  cast_region *cast_reg
    = new cast_region (alloc_region_id (), original_region, type);
  m_cast_regions.put (key, cast_reg);
  return cast_reg;
}

/* Return the frame_region for call to FUN from CALLING_FRAME, creating it
   if necessary.  CALLING_FRAME may be NULL.  */

const frame_region *
region_model_manager::get_frame_region (const frame_region *calling_frame,
					function *fun)
{
  int index = calling_frame ? calling_frame->get_index () + 1 : 0;

  frame_region::key_t key (calling_frame, fun);
  if (frame_region *reg = m_frame_regions.get (key))
    return reg;

  frame_region *frame_reg
    = new frame_region (alloc_region_id (), &m_stack_region, calling_frame,
			fun, index);
  m_frame_regions.put (key, frame_reg);
  return frame_reg;
}

/* Return the region that describes dereferencing SVAL, creating it
   if necessary.  */

const region *
region_model_manager::get_symbolic_region (const svalue *sval)
{
  symbolic_region::key_t key (&m_root_region, sval);
  if (symbolic_region *reg = m_symbolic_regions.get (key))
    return reg;

  symbolic_region *symbolic_reg
    = new symbolic_region (alloc_region_id (), &m_root_region, sval);
  m_symbolic_regions.put (key, symbolic_reg);
  return symbolic_reg;
}

/* Return the region that describes accessing STRING_CST, creating it
   if necessary.  */

const string_region *
region_model_manager::get_region_for_string (tree string_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  string_region **slot = m_string_map.get (string_cst);
  if (slot)
    return *slot;
  string_region *reg
    = new string_region (alloc_region_id (), &m_root_region, string_cst);
  m_string_map.put (string_cst, reg);
  return reg;
}

93e759fc
DM
/* Return the region that describes accessing BITS within PARENT as TYPE,
   creating it if necessary.  */

const region *
region_model_manager::get_bit_range (const region *parent, tree type,
				     const bit_range &bits)
{
  gcc_assert (parent);

  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  bit_range_region::key_t key (parent, type, bits);
  if (bit_range_region *reg = m_bit_range_regions.get (key))
    return reg;

  bit_range_region *bit_range_reg
    = new bit_range_region (alloc_region_id (), parent, type, bits);
  m_bit_range_regions.put (key, bit_range_reg);
  return bit_range_reg;
}

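/* Sketch of a bitfield-style access (MGR and PARENT are hypothetical,
   and this assumes bit_range's (start, size) constructor, both in bits):

     // The 3 bits starting at bit 8 of PARENT, viewed as unsigned char.
     const bit_range bits (8, 3);
     const region *reg
       = mgr->get_bit_range (parent, unsigned_char_type_node, bits);  */
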
/* Return the region that describes accessing the IDX-th variadic argument
   within PARENT_FRAME, creating it if necessary.  */

const var_arg_region *
region_model_manager::get_var_arg_region (const frame_region *parent_frame,
					  unsigned idx)
{
  gcc_assert (parent_frame);

  var_arg_region::key_t key (parent_frame, idx);
  if (var_arg_region *reg = m_var_arg_regions.get (key))
    return reg;

  var_arg_region *var_arg_reg
    = new var_arg_region (alloc_region_id (), parent_frame, idx);
  m_var_arg_regions.put (key, var_arg_reg);
  return var_arg_reg;
}

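/* Sketch (MGR and FRAME are hypothetical; FRAME is the frame_region of a
   variadic call): each successive va_arg access within FRAME gets its
   own region.

     const var_arg_region *arg0 = mgr->get_var_arg_region (frame, 0);
     const var_arg_region *arg1 = mgr->get_var_arg_region (frame, 1);
     gcc_assert (arg0 != arg1);  */
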
/* If we see a tree code we don't know how to handle, rather than
   ICEing or generating bogus results, create a dummy region, and notify
   CTXT so that it can mark the new state as being not properly
   modelled.  The exploded graph can then stop exploring that path,
   since any diagnostics we might issue will have questionable
   validity.  */

const region *
region_model_manager::
get_region_for_unexpected_tree_code (region_model_context *ctxt,
				     tree t,
				     const dump_location_t &loc)
{
  tree type = TYPE_P (t) ? t : TREE_TYPE (t);
  /* Note that these dummy regions are not consolidated: each call
     creates a fresh unknown_region.  */
  region *new_reg
    = new unknown_region (alloc_region_id (), &m_root_region, type);
  if (ctxt)
    ctxt->on_unexpected_tree_code (t, loc);
  return new_reg;
}

/* Return a region describing a heap-allocated block of memory.
   Reuse an existing heap_allocated_region if its id is not within
   BASE_REGS_IN_USE.  */

const region *
region_model_manager::
get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use)
{
  /* Try to reuse an existing region, if it's unreferenced in the
     client state.  */
  for (auto existing_reg : m_managed_dynamic_regions)
    if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
      if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
	return existing_reg;

  /* All existing ones (if any) are in use; create a new one.  */
  region *reg
    = new heap_allocated_region (alloc_region_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

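/* Reuse sketch (MGR and the bitmap handling are hypothetical): marking a
   region's id as in-use forces the next call to create a fresh region.

     auto_bitmap in_use;
     const region *r1 = mgr->get_or_create_region_for_heap_alloc (in_use);
     bitmap_set_bit (in_use, r1->get_id ());
     const region *r2 = mgr->get_or_create_region_for_heap_alloc (in_use);
     gcc_assert (r1 != r2);  */
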
/* Return a new region describing a block of memory allocated within FRAME.  */

const region *
region_model_manager::create_region_for_alloca (const frame_region *frame)
{
  gcc_assert (frame);
  region *reg = new alloca_region (alloc_region_id (), frame);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Log OBJ to LOGGER.  */

template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, " ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}

/* Specialization for frame_region, which also logs the count of locals
   managed by the frame_region.  */

template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, " ");
  obj->dump_to_pp (pp, true);
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}

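/* With a logger attached, the specialization above produces lines of
   roughly this shape (the exact dump format is illustrative):

     frame: 'foo'@1 [with 2 region(s) for locals]  */
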
/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  logger->log (" # %s: %li", title, (long)uniq_map.elements ());
  if (!show_objs)
    return;
  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects that were managed by MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const consolidation_map<T> &map)
{
  logger->log (" # %s: %li", title, (long)map.elements ());
  if (!show_objs)
    return;

  auto_vec<const T *> vec_objs (map.elements ());
  for (typename consolidation_map<T>::iterator iter = map.begin ();
       iter != map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("call string consolidation");
  m_empty_call_string.recursive_log (logger);
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  logger->log ("region consolidation");
  logger->log (" next region id: %i", m_next_region_id);
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
  logger->log (" # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}

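/* A log produced by the function above contains one " # NAME: COUNT"
   line per map, via log_uniq_map; e.g. (counts are illustrative):

     svalue consolidation
      # constant_svalue: 42
      # unknown_svalue: 1
     ...
     region consolidation
      next region id: 17  */
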
/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}

/* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
   (using -fdump-analyzer-untracked).  */

static void
dump_untracked_region (const decl_region *decl_reg)
{
  tree decl = decl_reg->get_decl ();
  if (TREE_CODE (decl) != VAR_DECL)
    return;
  /* For now, don't emit the status of decls in the constant pool, to avoid
     differences in DejaGnu test results between targets that use these vs
     those that don't.
     (Eventually these decls should probably be untracked and we should test
     for that, but that's not stage 4 material).  */
  if (DECL_IN_CONSTANT_POOL (decl))
    return;
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
	      "track %qD: %s",
	      decl, (decl_reg->tracked_p () ? "yes" : "no"));
}

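/* In a DejaGnu test compiled with -fdump-analyzer-untracked, each such
   decl therefore yields a warning that the test can match; a
   hypothetical excerpt:

     static int v;  // { dg-warning "track 'v': yes" }  */
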
/* Implementation of -fdump-analyzer-untracked.  */

void
region_model_manager::dump_untracked_regions () const
{
  for (auto iter : m_globals_map)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
  for (auto frame_iter : m_frame_regions)
    {
      const frame_region *frame_reg = frame_iter.second;
      frame_reg->dump_untracked_regions ();
    }
}

void
frame_region::dump_untracked_regions () const
{
  for (auto iter : m_locals)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */