/* Consolidation of svalues and regions.
   Copyright (C) 2020-2022 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21#include "config.h"
6341f14e 22#define INCLUDE_MEMORY
808f4dfe
DM
23#include "system.h"
24#include "coretypes.h"
25#include "tree.h"
26#include "diagnostic-core.h"
27#include "gimple-pretty-print.h"
28#include "function.h"
29#include "basic-block.h"
30#include "gimple.h"
31#include "gimple-iterator.h"
32#include "diagnostic-core.h"
33#include "graphviz.h"
34#include "options.h"
35#include "cgraph.h"
36#include "tree-dfa.h"
37#include "stringpool.h"
38#include "convert.h"
39#include "target.h"
40#include "fold-const.h"
41#include "tree-pretty-print.h"
808f4dfe 42#include "bitmap.h"
808f4dfe
DM
43#include "analyzer/analyzer.h"
44#include "analyzer/analyzer-logging.h"
45#include "ordered-hash-map.h"
46#include "options.h"
808f4dfe
DM
47#include "analyzer/supergraph.h"
48#include "sbitmap.h"
49#include "analyzer/call-string.h"
50#include "analyzer/program-point.h"
51#include "analyzer/store.h"
52#include "analyzer/region-model.h"
8ca7fa84 53#include "analyzer/constraint-manager.h"
808f4dfe
DM
54
55#if ENABLE_ANALYZER
56
57namespace ana {
58
/* class region_model_manager.  */

/* region_model_manager's ctor.  */

region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_empty_call_string (),
  m_next_region_id (0),
  m_root_region (alloc_region_id ()),
  m_stack_region (alloc_region_id (), &m_root_region),
  m_heap_region (alloc_region_id (), &m_root_region),
  m_unknown_NULL (NULL),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_region_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_region_id (), &m_root_region),
  m_globals_map (),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}

/* region_model_manager's dtor.  Delete all of the managed svalues
   and regions.  */

region_model_manager::~region_model_manager ()
{
  /* Delete consolidated svalues.  */
  for (constants_map_t::iterator iter = m_constants_map.begin ();
       iter != m_constants_map.end (); ++iter)
    delete (*iter).second;
  for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
       iter != m_unknowns_map.end (); ++iter)
    delete (*iter).second;
  delete m_unknown_NULL;
  for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
       iter != m_poisoned_values_map.end (); ++iter)
    delete (*iter).second;
  for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
       iter != m_setjmp_values_map.end (); ++iter)
    delete (*iter).second;
  for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
       iter != m_initial_values_map.end (); ++iter)
    delete (*iter).second;
  for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
       iter != m_pointer_values_map.end (); ++iter)
    delete (*iter).second;
  for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
       iter != m_unaryop_values_map.end (); ++iter)
    delete (*iter).second;
  for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
       iter != m_binop_values_map.end (); ++iter)
    delete (*iter).second;
  for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
       iter != m_sub_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_repeated_values_map)
    delete iter.second;
  for (auto iter : m_bits_within_values_map)
    delete iter.second;
  for (unmergeable_values_map_t::iterator iter
	 = m_unmergeable_values_map.begin ();
       iter != m_unmergeable_values_map.end (); ++iter)
    delete (*iter).second;
  for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
       iter != m_widening_values_map.end (); ++iter)
    delete (*iter).second;
  for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
       iter != m_compound_values_map.end (); ++iter)
    delete (*iter).second;
  for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
       iter != m_conjured_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_asm_output_values_map)
    delete iter.second;
  for (auto iter : m_const_fn_result_values_map)
    delete iter.second;

  /* Delete consolidated regions.  */
  for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
       iter != m_fndecls_map.end (); ++iter)
    delete (*iter).second;
  for (labels_map_t::iterator iter = m_labels_map.begin ();
       iter != m_labels_map.end (); ++iter)
    delete (*iter).second;
  for (globals_map_t::iterator iter = m_globals_map.begin ();
       iter != m_globals_map.end (); ++iter)
    delete (*iter).second;
  for (string_map_t::iterator iter = m_string_map.begin ();
       iter != m_string_map.end (); ++iter)
    delete (*iter).second;

  delete m_range_mgr;
}

/* Return true if C exceeds the complexity limit for svalues.  */

bool
region_model_manager::too_complex_p (const complexity &c) const
{
  if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
    return true;
  return false;
}

/* If SVAL exceeds the complexity limit for svalues, delete it
   and return true.
   Otherwise update m_max_complexity and return false.  */

bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  if (m_checking_feasibility)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  delete sval;
  return true;
}

/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL)		\
  do {							\
    svalue *sval_ = (SVAL);				\
    tree type_ = sval_->get_type ();			\
    if (reject_if_too_complex (sval_))			\
      return get_or_create_unknown_svalue (type_);	\
  } while (0)

/* svalue consolidation.  */

/* Return the svalue * for a constant_svalue for CST_EXPR,
   creating it if necessary.
   The constant_svalue instances are reused, based on pointer equality
   of trees.  */

const svalue *
region_model_manager::get_or_create_constant_svalue (tree cst_expr)
{
  gcc_assert (cst_expr);
  gcc_assert (CONSTANT_CLASS_P (cst_expr));

  constant_svalue **slot = m_constants_map.get (cst_expr);
  if (slot)
    return *slot;
  constant_svalue *cst_sval = new constant_svalue (cst_expr);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
  m_constants_map.put (cst_expr, cst_sval);
  return cst_sval;
}

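/* Illustrative sketch (not part of the original sources): because
   constant_svalue instances are consolidated on pointer equality of
   trees, repeated lookups with the same tree return the same instance,
   so clients can compare the resulting svalues by pointer.  Guarded out
   of the build; "mgr" is an assumed region_model_manager *.  */

#if 0
static void
example_constant_consolidation (region_model_manager *mgr)
{
  tree cst = build_int_cst (integer_type_node, 42);
  const svalue *a = mgr->get_or_create_constant_svalue (cst);
  const svalue *b = mgr->get_or_create_constant_svalue (cst);
  gcc_assert (a == b);	/* Same tree => same consolidated svalue.  */
}
#endif
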
/* Return the svalue * for a constant_svalue for the INTEGER_CST
   for VAL of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_int_cst (tree type, poly_int64 val)
{
  gcc_assert (type);
  tree tree_cst = build_int_cst (type, val);
  return get_or_create_constant_svalue (tree_cst);
}

/* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
   creating it if necessary.
   The unknown_svalue instances are reused, based on pointer equality
   of the types.  */

const svalue *
region_model_manager::get_or_create_unknown_svalue (tree type)
{
  /* Don't create unknown values when doing feasibility testing;
     instead, create a unique svalue.  */
  if (m_checking_feasibility)
    return create_unique_svalue (type);

  /* Special-case NULL, so that the hash_map can use NULL as the
     "empty" value.  */
  if (type == NULL_TREE)
    {
      if (!m_unknown_NULL)
	m_unknown_NULL = new unknown_svalue (type);
      return m_unknown_NULL;
    }

  unknown_svalue **slot = m_unknowns_map.get (type);
  if (slot)
    return *slot;
  unknown_svalue *sval = new unknown_svalue (type);
  m_unknowns_map.put (type, sval);
  return sval;
}

/* Return a freshly-allocated svalue of TYPE, owned by this manager.  */

const svalue *
region_model_manager::create_unique_svalue (tree type)
{
  svalue *sval = new placeholder_svalue (type, "unique");
  m_managed_dynamic_svalues.safe_push (sval);
  return sval;
}

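/* Illustrative sketch (not part of the original sources): unknown svalues
   are consolidated per type, so repeated lookups compare equal; during
   feasibility checking the manager instead hands out a fresh placeholder
   per call via create_unique_svalue.  Guarded out of the build; "mgr" is
   an assumed region_model_manager * that is not checking feasibility.  */

#if 0
static void
example_unknown_consolidation (region_model_manager *mgr)
{
  const svalue *u1 = mgr->get_or_create_unknown_svalue (integer_type_node);
  const svalue *u2 = mgr->get_or_create_unknown_svalue (integer_type_node);
  gcc_assert (u1 == u2);	/* Consolidated on pointer equality of the type.  */
}
#endif
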
/* Return the svalue * for the initial value of REG, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_initial_value (const region *reg)
{
  if (!reg->can_have_initial_svalue_p ())
    return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
					  reg->get_type ());

  /* The initial value of a cast is a cast of the initial value.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    {
      const region *original_reg = cast_reg->get_original_region ();
      return get_or_create_cast (cast_reg->get_type (),
				 get_or_create_initial_value (original_reg));
    }

  /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL.  */
  if (reg->symbolic_for_unknown_ptr_p ())
    return get_or_create_unknown_svalue (reg->get_type ());

  if (initial_svalue **slot = m_initial_values_map.get (reg))
    return *slot;
  initial_svalue *initial_sval = new initial_svalue (reg->get_type (), reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
  m_initial_values_map.put (reg, initial_sval);
  return initial_sval;
}

/* Return the svalue * for R using type TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
						   tree type)
{
  setjmp_svalue::key_t key (r, type);
  if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
    return *slot;
  setjmp_svalue *setjmp_sval = new setjmp_svalue (r, type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
  m_setjmp_values_map.put (key, setjmp_sval);
  return setjmp_sval;
}

/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
						     tree type)
{
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval = new poisoned_svalue (kind, type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}

/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
{
  /* If this is a symbolic region from dereferencing a pointer, and the types
     match, then return the original pointer.  */
  if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
    if (ptr_type == sym_reg->get_pointer ()->get_type ())
      return sym_reg->get_pointer ();

  region_svalue::key_t key (ptr_type, pointee);
  if (region_svalue **slot = m_pointer_values_map.get (key))
    return *slot;
  region_svalue *sval = new region_svalue (ptr_type, pointee);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
  m_pointer_values_map.put (key, sval);
  return sval;
}

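/* Illustrative sketch (not part of the original sources): get_ptr_svalue
   undoes a dereference/address-of round trip; taking the address of the
   symbolic region for "*p" at the matching pointer type yields "p"
   itself.  Guarded out of the build; "mgr" and "p_sval" (a pointer-typed
   svalue) are assumed inputs.  */

#if 0
static void
example_ptr_round_trip (region_model_manager *mgr, const svalue *p_sval)
{
  const region *star_p = mgr->get_symbolic_region (p_sval);
  const svalue *addr = mgr->get_ptr_svalue (p_sval->get_type (), star_p);
  gcc_assert (addr == p_sval);	/* &(*p) folds back to p.  */
}
#endif
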
/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned".  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast<INNER_TYPE> (innermost_arg))"
	   => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    tree inner_type = arg->get_type ();
	    if (TYPE_SIZE (type)
		&& TYPE_SIZE (inner_type)
		&& (fold_binary (LE_EXPR, boolean_type_node,
				 TYPE_SIZE (type), TYPE_SIZE (inner_type))
		    == boolean_true_node))
	      return maybe_fold_unaryop (type, op, innermost_arg);
	  }
	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    case NEGATE_EXPR:
      {
	/* -(-(VAL)) is VAL, for integer types.  */
	if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
	  if (unaryop->get_op () == NEGATE_EXPR
	      && type == unaryop->get_type ()
	      && type
	      && INTEGRAL_TYPE_P (type))
	    return unaryop->get_arg ();
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      {
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);

	/* fold_unary can return casts of constants; try to handle them.  */
	if (op != NOP_EXPR
	    && type
	    && TREE_CODE (result) == NOP_EXPR
	    && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	  {
	    const svalue *inner_cst
	      = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	    return get_or_create_cast (type,
				       get_or_create_cast (TREE_TYPE (result),
							   inner_cst));
	  }
      }

  return NULL;
}

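/* Illustrative sketch (not part of the original sources): the
   TRUTH_NOT_EXPR case above rewrites "!(x == y)" into "x != y" instead
   of stacking a logical-not on top of the comparison.  Guarded out of
   the build; "mgr", "x" and "y" are assumed non-constant integer-typed
   svalues.  */

#if 0
static void
example_fold_logical_not (region_model_manager *mgr,
			  const svalue *x, const svalue *y)
{
  const svalue *eq
    = mgr->get_or_create_binop (boolean_type_node, EQ_EXPR, x, y);
  const svalue *negated
    = mgr->get_or_create_unaryop (boolean_type_node, TRUTH_NOT_EXPR, eq);
  /* The fold yields the inverted comparison, not a unaryop_svalue.  */
  const binop_svalue *binop = negated->dyn_cast_binop_svalue ();
  gcc_assert (binop && binop->get_op () == NE_EXPR);
}
#endif
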
/* Return the svalue * for a unary operation OP on ARG with a result of
   type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
					     const svalue *arg)
{
  if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
    return folded;
  unaryop_svalue::key_t key (type, op, arg);
  if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
    return *slot;
  unaryop_svalue *unaryop_sval = new unaryop_svalue (type, op, arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
  m_unaryop_values_map.put (key, unaryop_sval);
  return unaryop_sval;
}

/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
   Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
   of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
   and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
   on.  */

static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
  gcc_assert (dst_type);
  if (!src_type)
    return NOP_EXPR;

  if (TREE_CODE (src_type) == REAL_TYPE)
    {
      if (TREE_CODE (dst_type) == INTEGER_TYPE)
	return FIX_TRUNC_EXPR;
      else
	return VIEW_CONVERT_EXPR;
    }

  return NOP_EXPR;
}

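/* Illustrative sketch (not part of the original sources): the choice of
   tree code depends only on the source/destination type classes.
   Guarded out of the build.  */

#if 0
static void
example_get_code_for_cast ()
{
  /* float -> int needs FIX_TRUNC_EXPR; fold_unary cannot treat it as a
     simple NOP conversion.  */
  gcc_assert (get_code_for_cast (integer_type_node, float_type_node)
	      == FIX_TRUNC_EXPR);
  /* int -> long is an ordinary conversion.  */
  gcc_assert (get_code_for_cast (long_integer_type_node, integer_type_node)
	      == NOP_EXPR);
}
#endif
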
/* Return the svalue * for a cast of ARG to type TYPE, creating it
   if necessary.  */

const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
  gcc_assert (type);

  /* No-op if the types are the same.  */
  if (type == arg->get_type ())
    return arg;

  /* Don't attempt to handle casts involving vector types for now.  */
  if (TREE_CODE (type) == VECTOR_TYPE
      || (arg->get_type ()
	  && TREE_CODE (arg->get_type ()) == VECTOR_TYPE))
    return get_or_create_unknown_svalue (type);

  enum tree_code op = get_code_for_cast (type, arg->get_type ());
  return get_or_create_unaryop (type, op, arg);
}

/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return NULL.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  if (type != unsigned_char_type_node)
    return NULL;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return NULL;

  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return NULL;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}

/* Subroutine of region_model_manager::get_or_create_binop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
					const svalue *arg0,
					const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (tree result = fold_binary (op, type, cst0, cst1))
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);
    }

  if (FLOAT_TYPE_P (type)
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1) && type == arg0->get_type ())
	return arg0;
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1) && type == arg0->get_type ())
	return arg0;
      break;
    case MULT_EXPR:
      /* (VAL * 0) -> 0.  */
      if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
	return get_or_create_constant_svalue (build_int_cst (type, 0));
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
	return arg0;
      break;
    case BIT_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 & 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));

	  if (const compound_svalue *compound_sval
	      = arg0->dyn_cast_compound_svalue ())
	    if (const svalue *sval
		= maybe_undo_optimize_bit_field_compare (type,
							 compound_sval,
							 cst1, arg1))
	      return sval;
	}
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then...  */
	  /* ..."(1 & x) -> x".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x & 1) -> x".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_cast (type, arg0);
	  /* ..."(0 & x) -> 0".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_int_cst (type, 0);
	  /* ..."(x & 0) -> 0".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_int_cst (type, 0);
	}
      break;
    case BIT_IOR_EXPR:
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then...  */
	  /* ..."(1 | x) -> 1".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(x | 1) -> 1".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(0 | x) -> x".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x | 0) -> x".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 && 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));
	  else
	    /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
	{
	  if (zerop (cst1))
	    /* "(ARG0 || 0)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	  else
	    /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
	    return get_or_create_cast (type, arg1);
	}
      break;
    }

  /* For associative ops, fold "(X op CST_A) op CST_B" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
	  && binop->get_arg1 ()->maybe_get_constant ()
	  && type == binop->get_type ()
	  && type == binop->get_arg0 ()->get_type ()
	  && type == binop->get_arg1 ()->get_type ())
	return get_or_create_binop
	  (type, op, binop->get_arg0 (),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "(PTR ptr+ CST_A) ptr+ CST_B" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
	if (binop->get_arg1 ()->maybe_get_constant ())
	  return get_or_create_binop
	    (type, op, binop->get_arg0 (),
	     get_or_create_binop (size_type_node, op,
				  binop->get_arg1 (), arg1));

  /* etc.  */

  return NULL;
}

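/* Illustrative sketch (not part of the original sources): the associative
   refold above keeps chains of constant adjustments flat, e.g.
   "(x + 1) + 2" becomes "x + 3" rather than a nested binop.  Guarded out
   of the build; "mgr" and "x" (a non-constant int-typed svalue) are
   assumed inputs.  */

#if 0
static void
example_associative_refold (region_model_manager *mgr, const svalue *x)
{
  tree itype = integer_type_node;
  const svalue *one = mgr->get_or_create_int_cst (itype, 1);
  const svalue *two = mgr->get_or_create_int_cst (itype, 2);
  const svalue *three = mgr->get_or_create_int_cst (itype, 3);
  const svalue *x_plus_1
    = mgr->get_or_create_binop (itype, PLUS_EXPR, x, one);
  const svalue *sum
    = mgr->get_or_create_binop (itype, PLUS_EXPR, x_plus_1, two);
  /* The constants fold together, so this consolidates with "x + 3".  */
  gcc_assert (sum == mgr->get_or_create_binop (itype, PLUS_EXPR, x, three));
}
#endif
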
/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     it via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval = new binop_svalue (type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}

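/* Illustrative sketch (not part of the original sources): canonicalizing
   constants to the RHS of commutative ops means "1 + x" and "x + 1"
   consolidate to the same binop_svalue.  Guarded out of the build;
   "mgr" and "x" (a non-constant int-typed svalue) are assumed inputs.  */

#if 0
static void
example_commutative_canonicalization (region_model_manager *mgr,
				      const svalue *x)
{
  tree itype = integer_type_node;
  const svalue *one = mgr->get_or_create_int_cst (itype, 1);
  gcc_assert (mgr->get_or_create_binop (itype, PLUS_EXPR, one, x)
	      == mgr->get_or_create_binop (itype, PLUS_EXPR, x, one));
}
#endif
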
/* Subroutine of region_model_manager::get_or_create_sub_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
      = parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      {
	/* If we have a concrete 1-byte access within the parent region... */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the STRING_CST.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		= maybe_get_char_from_string_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  if (const initial_svalue *init_sval
      = parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r))[ELEMENT] -> INIT(r[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg
	    = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  if (const repeated_svalue *repeated_sval
      = parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  return NULL;
}

/* Return the svalue * for extracting a subvalue of type TYPE from
   PARENT_SVALUE based on SUBREGION, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  if (const svalue *folded
      = maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (type, parent_svalue, subregion);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}

/* Subroutine of region_model_manager::get_or_create_repeated_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
    {
      HOST_WIDE_INT num_bytes_inner_svalue
	= int_size_in_bytes (inner_svalue->get_type ());
      if (num_bytes_inner_svalue != -1)
	if (num_bytes_inner_svalue
	    == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Handle zero-fill of a specific type.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  return NULL;
}

/* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
   enough times to be of size OUTER_SIZE, creating it if necessary.
   e.g. for filling buffers with a constant value.  */

const svalue *
region_model_manager::get_or_create_repeated_svalue (tree type,
						     const svalue *outer_size,
						     const svalue *inner_svalue)
{
  if (const svalue *folded
      = maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
    return folded;

  repeated_svalue::key_t key (type, outer_size, inner_svalue);
  if (repeated_svalue **slot = m_repeated_values_map.get (key))
    return *slot;
  repeated_svalue *repeated_sval
    = new repeated_svalue (type, outer_size, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
  m_repeated_values_map.put (key, repeated_sval);
  return repeated_sval;
}

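/* Illustrative sketch (not part of the original sources): repeated
   svalues model memset-style fills; a repeat whose outer size exactly
   matches the size of its inner svalue folds to a plain cast of that
   svalue.  Guarded out of the build; "mgr" is an assumed
   region_model_manager *, and a 4-byte "int" is assumed (true of
   typical targets).  */

#if 0
static void
example_repeated_fold (region_model_manager *mgr)
{
  /* Repeating a 4-byte int once (outer size 4) is just the int itself.  */
  const svalue *inner = mgr->get_or_create_int_cst (integer_type_node, 7);
  const svalue *four = mgr->get_or_create_int_cst (size_type_node, 4);
  const svalue *rep
    = mgr->get_or_create_repeated_svalue (integer_type_node, four, inner);
  gcc_assert (rep == inner);	/* Folded: same size => same-typed cast.  */
}
#endif
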
/* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_bit_range_for_field (tree field, bit_range *out)
{
  bit_size_t bit_size;
  if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
    return false;
  int field_bit_offset = int_bit_position (field);
  *out = bit_range (field_bit_offset, bit_size);
  return true;
}

/* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_byte_range_for_field (tree field, byte_range *out)
{
  bit_range field_bits (0, 0);
  if (!get_bit_range_for_field (field, &field_bits))
    return false;
  return field_bits.as_byte_range (out);
}

/* Attempt to determine if there is a specific field within RECORD_TYPE
   at BYTES.  If so, return it, and write the location of BYTES relative
   to the field to *OUT_RANGE_WITHIN_FIELD.
   Otherwise, return NULL_TREE.
   For example, given:
     struct foo { uint32 a; uint32 b; };
   and
     bytes = {bytes 6-7} (of foo)
   we have bytes 2-3 of field b.  */

static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  byte_range field_bytes (0, 0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within field_bytes?  */
  byte_range bytes_within_field (0, 0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  *out_range_within_field = bytes_within_field;
  return field;
}

/* Subroutine of region_model_manager::get_or_create_bits_within.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN ((0, sizeof (VAL)), VAL)
     to:
       CAST (TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding.  */
  if (const svalue *sval
      = inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  byte_range bytes (0, 0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if the range is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
		      = inner_svalue->dyn_cast_initial_svalue ())
		    {
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if the range is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		  = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  return NULL;
}

/* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_bits_within (tree type,
						 const bit_range &bits,
						 const svalue *inner_svalue)
{
  if (const svalue *folded
      = maybe_fold_bits_within_svalue (type, bits, inner_svalue))
    return folded;

  bits_within_svalue::key_t key (type, bits, inner_svalue);
  if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
    return *slot;
  bits_within_svalue *bits_within_sval
    = new bits_within_svalue (type, bits, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
  m_bits_within_values_map.put (key, bits_within_sval);
  return bits_within_sval;
}

/* Return the svalue * that decorates ARG as being unmergeable,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unmergeable (const svalue *arg)
{
  if (arg->get_kind () == SK_UNMERGEABLE)
    return arg;

  if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
    return *slot;
  unmergeable_svalue *unmergeable_sval = new unmergeable_svalue (arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
  m_unmergeable_values_map.put (arg, unmergeable_sval);
  return unmergeable_sval;
}

/* Return the svalue * of type TYPE for the merger of value BASE_SVAL
   and ITER_SVAL at POINT, creating it if necessary.  */

const svalue *
region_model_manager::
get_or_create_widening_svalue (tree type,
			       const function_point &point,
			       const svalue *base_sval,
			       const svalue *iter_sval)
{
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  widening_svalue::key_t key (type, point, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (type, point, base_sval, iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}

/* Return the svalue * of type TYPE for the compound values in MAP,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const binding_map &map)
{
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}

/* class conjured_purge.  */

/* Purge state relating to SVAL.  */

void
conjured_purge::purge (const conjured_svalue *sval) const
{
  m_model->purge_state_involving (sval, m_ctxt);
}

/* Return the svalue * of type TYPE for the value conjured for ID_REG
   at STMT, creating it if necessary.
   Use P to purge existing state from the svalue, for the case where a
   conjured_svalue would be reused along an execution path.  */

const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg,
						     const conjured_purge &p)
{
  conjured_svalue::key_t key (type, stmt, id_reg);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    {
      const conjured_svalue *sval = *slot;
      /* We're reusing an existing conjured_svalue, perhaps from a different
	 state within this analysis, or perhaps from an earlier state on this
	 execution path.  For the latter, purge any state involving the "new"
	 svalue from the current program_state.  */
      p.purge (sval);
      return sval;
    }
  conjured_svalue *conjured_sval
    = new conjured_svalue (type, stmt, id_reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}

/* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::
maybe_fold_asm_output_svalue (tree type,
			      const vec<const svalue *> &inputs)
{
  /* Unknown inputs should lead to unknown results.  */
  for (const auto &iter : inputs)
    if (iter->get_kind () == SK_UNKNOWN)
      return get_or_create_unknown_svalue (type);

  return NULL;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
   asm stmt ASM_STMT, given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
      = maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (type, asm_string, output_idx, noutputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
   asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
   INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const char *asm_string,
				 unsigned output_idx,
				 unsigned num_outputs,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
      = maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (type, asm_string, output_idx, num_outputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for the result of a call to FNDECL
   with __attribute__((const)), given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_const_fn_result_svalue (tree type,
				      tree fndecl,
				      const vec<const svalue *> &inputs)
{
  gcc_assert (type);
  gcc_assert (fndecl);
  gcc_assert (DECL_P (fndecl));
  gcc_assert (TREE_READONLY (fndecl));
  gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);

  const_fn_result_svalue::key_t key (type, fndecl, inputs);
  if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
    return *slot;
  const_fn_result_svalue *const_fn_result_sval
    = new const_fn_result_svalue (type, fndecl, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
  m_const_fn_result_values_map.put (key, const_fn_result_sval);
  return const_fn_result_sval;
}

/* Given STRING_CST (a STRING_CST) and BYTE_OFFSET_CST (a constant),
   attempt to get the character at that offset, returning either
   the svalue for the character constant, or NULL if unsuccessful.  */

const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && compare_tree_int (byte_offset_cst,
			   TREE_STRING_LENGTH (string_cst)) < 0
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
			      (TREE_STRING_POINTER (string_cst)
			       [TREE_INT_CST_LOW (byte_offset_cst)]));
      return get_or_create_constant_svalue (char_cst);
    }
  return NULL;
}

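/* Illustrative sketch (not part of the original sources): reading one
   byte of a string literal at a constant in-bounds offset yields the
   svalue for that character constant.  Guarded out of the build; "mgr"
   is an assumed region_model_manager *, and "str_cst" an assumed
   STRING_CST for "foo".  */

#if 0
static void
example_char_from_string_cst (region_model_manager *mgr, tree str_cst)
{
  tree idx = build_int_cst_type (size_type_node, 1);
  const svalue *ch = mgr->maybe_get_char_from_string_cst (str_cst, idx);
  /* ch models the character constant 'o' (byte 1 of "foo").  */
  gcc_assert (ch && ch->maybe_get_constant ());
}
#endif
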
/* region consolidation.  */

/* Return the region for FNDECL, creating it if necessary.  */

const function_region *
region_model_manager::get_region_for_fndecl (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);

  function_region **slot = m_fndecls_map.get (fndecl);
  if (slot)
    return *slot;
  function_region *reg
    = new function_region (alloc_region_id (), &m_code_region, fndecl);
  m_fndecls_map.put (fndecl, reg);
  return reg;
}

/* Return the region for LABEL, creating it if necessary.  */

const label_region *
region_model_manager::get_region_for_label (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  label_region **slot = m_labels_map.get (label);
  if (slot)
    return *slot;

  tree fndecl = DECL_CONTEXT (label);
  gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);

  const function_region *func_reg = get_region_for_fndecl (fndecl);
  label_region *reg
    = new label_region (alloc_region_id (), func_reg, label);
  m_labels_map.put (label, reg);
  return reg;
}

/* Return the region for EXPR, creating it if necessary.  */

const decl_region *
region_model_manager::get_region_for_global (tree expr)
{
  gcc_assert (TREE_CODE (expr) == VAR_DECL);

  decl_region **slot = m_globals_map.get (expr);
  if (slot)
    return *slot;
  decl_region *reg
    = new decl_region (alloc_region_id (), &m_globals_region, expr);
  m_globals_map.put (expr, reg);
  return reg;
}

/* Return the region for an unknown access of type REGION_TYPE,
   creating it if necessary.
   This is a symbolic_region, where the pointer is an unknown_svalue
   of type &REGION_TYPE.  */

const region *
region_model_manager::get_unknown_symbolic_region (tree region_type)
{
  tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
  const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
  return get_symbolic_region (unknown_ptr);
}

/* Return the region that describes accessing field FIELD of PARENT,
   creating it if necessary.  */

const region *
region_model_manager::get_field_region (const region *parent, tree field)
{
  gcc_assert (TREE_CODE (field) == FIELD_DECL);

  /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (TREE_TYPE (field));

  field_region::key_t key (parent, field);
  if (field_region *reg = m_field_regions.get (key))
    return reg;

  field_region *field_reg
    = new field_region (alloc_region_id (), parent, field);
  m_field_regions.put (key, field_reg);
  return field_reg;
}

/* Return the region that describes accessing the element of type
   ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary.  */

const region *
region_model_manager::get_element_region (const region *parent,
					  tree element_type,
					  const svalue *index)
{
  /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (element_type);

  element_region::key_t key (parent, element_type, index);
  if (element_region *reg = m_element_regions.get (key))
    return reg;

  element_region *element_reg
    = new element_region (alloc_region_id (), parent, element_type, index);
  m_element_regions.put (key, element_reg);
  return element_reg;
}

/* Return the region that describes accessing the subregion of type
   TYPE at offset BYTE_OFFSET within PARENT, creating it if
   necessary.  */

const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* If BYTE_OFFSET is zero, return PARENT.  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to OFFSET_REGION(REG, (X + Y)).  */
  if (const offset_region *parent_offset_reg
      = parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_region_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}

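/* Illustrative sketch (not part of the original sources): nested offset
   regions fold so that the offsets are summed against the same parent,
   e.g. REG+4 then +8 consolidates with REG+12.  Guarded out of the
   build; "mgr" and "reg" (a region that is not itself an offset region)
   are assumed inputs, and char_type_node stands in for the access
   type.  */

#if 0
static void
example_offset_region_fold (region_model_manager *mgr, const region *reg)
{
  tree ctype = char_type_node;
  const svalue *four = mgr->get_or_create_int_cst (size_type_node, 4);
  const svalue *eight = mgr->get_or_create_int_cst (size_type_node, 8);
  const svalue *twelve = mgr->get_or_create_int_cst (size_type_node, 12);
  const region *nested
    = mgr->get_offset_region (mgr->get_offset_region (reg, ctype, four),
			      ctype, eight);
  gcc_assert (nested == mgr->get_offset_region (reg, ctype, twelve));
}
#endif
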
/* Return the region that describes accessing the subregion of type
   TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary.  */

const region *
region_model_manager::get_sized_region (const region *parent,
					tree type,
					const svalue *byte_size_sval)
{
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  if (byte_size_sval->get_type () != size_type_node)
    byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);

  /* If PARENT is already that size, return it.  */
  const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
  if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
    if (tree size_cst = byte_size_sval->maybe_get_constant ())
      {
	tree comparison
	  = fold_binary (EQ_EXPR, boolean_type_node,
			 parent_size_cst, size_cst);
	if (comparison == boolean_true_node)
	  return parent;
      }

  sized_region::key_t key (parent, type, byte_size_sval);
  if (sized_region *reg = m_sized_regions.get (key))
    return reg;

  sized_region *sized_reg
    = new sized_region (alloc_region_id (), parent, type, byte_size_sval);
  m_sized_regions.put (key, sized_reg);
  return sized_reg;
}

808f4dfe
DM
1540/* Return the region that describes accessing PARENT_REGION as if
1541 it were of type TYPE, creating it if necessary. */
1542
1543const region *
1544region_model_manager::get_cast_region (const region *original_region,
1545 tree type)
1546{
1547 /* If types match, return ORIGINAL_REGION. */
1548 if (type == original_region->get_type ())
1549 return original_region;
1550
3d41408c
DM
1551 if (original_region->symbolic_for_unknown_ptr_p ())
1552 return get_unknown_symbolic_region (type);
1553
808f4dfe
DM
1554 cast_region::key_t key (original_region, type);
1555 if (cast_region *reg = m_cast_regions.get (key))
1556 return reg;
1557
1558 cast_region *cast_reg
1559 = new cast_region (alloc_region_id (), original_region, type);
1560 m_cast_regions.put (key, cast_reg);
1561 return cast_reg;
1562}
1563
1564/* Return the frame_region for call to FUN from CALLING_FRAME, creating it
1565 if necessary. CALLING_FRAME may be NULL. */
1566
1567const frame_region *
1568region_model_manager::get_frame_region (const frame_region *calling_frame,
1569 function *fun)
1570{
1571 int index = calling_frame ? calling_frame->get_index () + 1 : 0;
1572
1573 frame_region::key_t key (calling_frame, fun);
1574 if (frame_region *reg = m_frame_regions.get (key))
1575 return reg;
1576
1577 frame_region *frame_reg
1578 = new frame_region (alloc_region_id (), &m_stack_region, calling_frame,
1579 fun, index);
1580 m_frame_regions.put (key, frame_reg);
1581 return frame_reg;
1582}
1583
1584/* Return the region that describes dereferencing SVAL, creating it
1585 if necessary. */
1586
1587const region *
1588region_model_manager::get_symbolic_region (const svalue *sval)
1589{
1590 symbolic_region::key_t key (&m_root_region, sval);
1591 if (symbolic_region *reg = m_symbolic_regions.get (key))
1592 return reg;
1593
1594 symbolic_region *symbolic_reg
1595 = new symbolic_region (alloc_region_id (), &m_root_region, sval);
1596 m_symbolic_regions.put (key, symbolic_reg);
1597 return symbolic_reg;
1598}
1599
1600/* Return the region that describes accessing STRING_CST, creating it
1601 if necessary. */
1602
1603const string_region *
1604region_model_manager::get_region_for_string (tree string_cst)
1605{
1606 gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1607
1608 string_region **slot = m_string_map.get (string_cst);
1609 if (slot)
1610 return *slot;
1611 string_region *reg
1612 = new string_region (alloc_region_id (), &m_root_region, string_cst);
1613 m_string_map.put (string_cst, reg);
1614 return reg;
1615}
1616
93e759fc
DM
1617/* Return the region that describes accessing BITS within PARENT as TYPE,
1618 creating it if necessary. */
1619
1620const region *
1621region_model_manager::get_bit_range (const region *parent, tree type,
1622 const bit_range &bits)
1623{
1624 gcc_assert (parent);
1625
3d41408c
DM
1626 if (parent->symbolic_for_unknown_ptr_p ())
1627 return get_unknown_symbolic_region (type);
1628
93e759fc
DM
1629 bit_range_region::key_t key (parent, type, bits);
1630 if (bit_range_region *reg = m_bit_range_regions.get (key))
1631 return reg;
1632
1633 bit_range_region *bit_range_reg
1634 = new bit_range_region (alloc_region_id (), parent, type, bits);
1635 m_bit_range_regions.put (key, bit_range_reg);
1636 return bit_range_reg;
1637}
1638
2402dc6b
DM
/* Return the region that describes accessing the IDX-th variadic argument
   within PARENT_FRAME, creating it if necessary.  */

const var_arg_region *
region_model_manager::get_var_arg_region (const frame_region *parent_frame,
					  unsigned idx)
{
  gcc_assert (parent_frame);

  var_arg_region::key_t key (parent_frame, idx);
  if (var_arg_region *reg = m_var_arg_regions.get (key))
    return reg;

  var_arg_region *var_arg_reg
    = new var_arg_region (alloc_region_id (), parent_frame, idx);
  m_var_arg_regions.put (key, var_arg_reg);
  return var_arg_reg;
}

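/* A sketch of the intent: IDX counts the variadic arguments of the call
   being modeled, so within PARENT_FRAME the region for the 0th argument
   after the named parameters is distinct from that for the 1st, etc.  */
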
/* If we see a tree code we don't know how to handle, then rather than
   ICEing or generating bogus results, create a dummy region, and notify
   CTXT so that it can mark the new state as not being properly modelled.
   The exploded graph can then stop exploring that path, since any
   diagnostics we might issue would have questionable validity.  */

const region *
region_model_manager::
get_region_for_unexpected_tree_code (region_model_context *ctxt,
				     tree t,
				     const dump_location_t &loc)
{
  tree type = TYPE_P (t) ? t : TREE_TYPE (t);
  region *new_reg
    = new unknown_region (alloc_region_id (), &m_root_region, type);
  if (ctxt)
    ctxt->on_unexpected_tree_code (t, loc);
  return new_reg;
}

/* Return a new region describing a heap-allocated block of memory.  */

const region *
region_model_manager::create_region_for_heap_alloc ()
{
  region *reg
    = new heap_allocated_region (alloc_region_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Return a new region describing a block of memory allocated within FRAME.  */

const region *
region_model_manager::create_region_for_alloca (const frame_region *frame)
{
  gcc_assert (frame);
  region *reg = new alloca_region (alloc_region_id (), frame);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

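/* Note that, unlike the consolidated "get_*" methods above, the two
   "create_*" methods return a fresh region on every call: two separate
   malloc calls must be modeled by two distinct heap_allocated_regions.
   The manager retains ownership via m_managed_dynamic_regions.  */
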
/* Log OBJ to LOGGER.  */

template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}

/* Specialization for frame_region, which also logs the count of locals
   managed by the frame_region.  */

template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}

/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  logger->log ("  # %s: %li", title, (long)uniq_map.elements ());
  if (!show_objs)
    return;
  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects that were managed by MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const consolidation_map<T> &map)
{
  logger->log ("  # %s: %li", title, (long)map.elements ());
  if (!show_objs)
    return;

  auto_vec<const T *> vec_objs (map.elements ());
  for (typename consolidation_map<T>::iterator iter = map.begin ();
       iter != map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}
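
/* The two log_uniq_map overloads above differ only in the map type they
   iterate over (hash_map vs consolidation_map); both sort the managed
   objects with T::cmp_ptr_ptr so that the log output is deterministic
   across runs, which matters when diffing analyzer logs.  */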

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("call string consolidation");
  m_empty_call_string.recursive_log (logger);
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  logger->log ("region consolidation");
  logger->log ("  next region id: %i", m_next_region_id);
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
  logger->log ("  # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}
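
/* A sketch of the resulting log (the counts here are hypothetical; the
   shape follows the format strings above):

     svalue consolidation
       # constant_svalue: 12
       # unknown_svalue: 3
     ...
     region consolidation
       next region id: 40
       # frame_region: 2
     ...  */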

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}

/* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
   (using -fdump-analyzer-untracked).  */

static void
dump_untracked_region (const decl_region *decl_reg)
{
  tree decl = decl_reg->get_decl ();
  if (TREE_CODE (decl) != VAR_DECL)
    return;
  /* For now, don't emit the status of decls in the constant pool, to avoid
     differences in DejaGnu test results between targets that use these vs
     those that don't.
     (Eventually these decls should probably be untracked and we should test
     for that, but that's not stage 4 material).  */
  if (DECL_IN_CONSTANT_POOL (decl))
    return;
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
	      "track %qD: %s",
	      decl, (decl_reg->tracked_p () ? "yes" : "no"));
}
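
/* For a variable "v" this emits a diagnostic of the form
   "track 'v': yes" or "track 'v': no" at the decl's location (a sketch
   based on the format string above), which DejaGnu tests can match via
   dg-warning directives.  */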

/* Implementation of -fdump-analyzer-untracked.  */

void
region_model_manager::dump_untracked_regions () const
{
  for (auto iter : m_globals_map)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
  for (auto frame_iter : m_frame_regions)
    {
      const frame_region *frame_reg = frame_iter.second;
      frame_reg->dump_untracked_regions ();
    }
}

void
frame_region::dump_untracked_regions () const
{
  for (auto iter : m_locals)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */