/* Consolidation of svalues and regions.
   Copyright (C) 2020-2023 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "bitmap.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"

#if ENABLE_ANALYZER

namespace ana {

/* class region_model_manager.  */

/* region_model_manager's ctor.  */

region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_next_symbol_id (0),
  m_empty_call_string (),
  m_root_region (alloc_symbol_id ()),
  m_stack_region (alloc_symbol_id (), &m_root_region),
  m_heap_region (alloc_symbol_id (), &m_root_region),
  m_unknown_NULL (NULL),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_symbol_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_symbol_id (), &m_root_region),
  m_globals_map (),
  m_thread_local_region (alloc_symbol_id (), &m_root_region),
  m_errno_region (alloc_symbol_id (), &m_thread_local_region),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}

/* region_model_manager's dtor.  Delete all of the managed svalues
   and regions.  */

region_model_manager::~region_model_manager ()
{
  /* Delete consolidated svalues.  */
  for (constants_map_t::iterator iter = m_constants_map.begin ();
       iter != m_constants_map.end (); ++iter)
    delete (*iter).second;
  for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
       iter != m_unknowns_map.end (); ++iter)
    delete (*iter).second;
  delete m_unknown_NULL;
  for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
       iter != m_poisoned_values_map.end (); ++iter)
    delete (*iter).second;
  for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
       iter != m_setjmp_values_map.end (); ++iter)
    delete (*iter).second;
  for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
       iter != m_initial_values_map.end (); ++iter)
    delete (*iter).second;
  for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
       iter != m_pointer_values_map.end (); ++iter)
    delete (*iter).second;
  for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
       iter != m_unaryop_values_map.end (); ++iter)
    delete (*iter).second;
  for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
       iter != m_binop_values_map.end (); ++iter)
    delete (*iter).second;
  for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
       iter != m_sub_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_repeated_values_map)
    delete iter.second;
  for (auto iter : m_bits_within_values_map)
    delete iter.second;
  for (unmergeable_values_map_t::iterator iter
	 = m_unmergeable_values_map.begin ();
       iter != m_unmergeable_values_map.end (); ++iter)
    delete (*iter).second;
  for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
       iter != m_widening_values_map.end (); ++iter)
    delete (*iter).second;
  for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
       iter != m_compound_values_map.end (); ++iter)
    delete (*iter).second;
  for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
       iter != m_conjured_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_asm_output_values_map)
    delete iter.second;
  for (auto iter : m_const_fn_result_values_map)
    delete iter.second;

  /* Delete consolidated regions.  */
  for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
       iter != m_fndecls_map.end (); ++iter)
    delete (*iter).second;
  for (labels_map_t::iterator iter = m_labels_map.begin ();
       iter != m_labels_map.end (); ++iter)
    delete (*iter).second;
  for (globals_map_t::iterator iter = m_globals_map.begin ();
       iter != m_globals_map.end (); ++iter)
    delete (*iter).second;
  for (string_map_t::iterator iter = m_string_map.begin ();
       iter != m_string_map.end (); ++iter)
    delete (*iter).second;

  delete m_range_mgr;
}

/* Return true if C exceeds the complexity limit for svalues.  */

bool
region_model_manager::too_complex_p (const complexity &c) const
{
  if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
    return true;
  return false;
}

/* If SVAL exceeds the complexity limit for svalues, delete it
   and return true.
   Otherwise update m_max_complexity and return false.  */

bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  if (m_checking_feasibility)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  delete sval;
  return true;
}

/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL)		\
  do {							\
    svalue *sval_ = (SVAL);				\
    tree type_ = sval_->get_type ();			\
    if (reject_if_too_complex (sval_))			\
      return get_or_create_unknown_svalue (type_);	\
  } while (0)
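
/* Typical usage pattern for the macro above (an illustrative sketch;
   "foo_svalue" and "m_foo_map" are placeholders, not real members):

     foo_svalue *sval = new foo_svalue (alloc_symbol_id (), ...);
     RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
     m_foo_map.put (key, sval);
     return sval;

   i.e. allocate a candidate svalue, bail out to an unknown_svalue if
   it is too complex, and otherwise consolidate it in the relevant
   map.  */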

/* svalue consolidation.  */

/* Return the svalue * for a constant_svalue for CST_EXPR,
   creating it if necessary.
   The constant_svalue instances are reused, based on pointer equality
   of trees.  */

const svalue *
region_model_manager::get_or_create_constant_svalue (tree cst_expr)
{
  gcc_assert (cst_expr);
  gcc_assert (CONSTANT_CLASS_P (cst_expr));

  constant_svalue **slot = m_constants_map.get (cst_expr);
  if (slot)
    return *slot;
  constant_svalue *cst_sval
    = new constant_svalue (alloc_symbol_id (), cst_expr);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
  m_constants_map.put (cst_expr, cst_sval);
  return cst_sval;
}
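
/* Consolidation example (illustrative, assuming a region_model_manager
   *mgr): since the map is keyed on the tree node itself and the
   middle-end shares INTEGER_CST nodes, repeated requests for the same
   constant yield the same svalue:

     tree cst = build_int_cst (integer_type_node, 42);
     const svalue *a = mgr->get_or_create_constant_svalue (cst);
     const svalue *b = mgr->get_or_create_constant_svalue (cst);
     gcc_assert (a == b);

   which is what makes pointer equality a valid comparison on
   svalues.  */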

/* Return the svalue * for a constant_svalue for the INTEGER_CST
   of type TYPE with value CST, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_int_cst (tree type,
					     const poly_wide_int_ref &cst)
{
  gcc_assert (type);
  gcc_assert (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type));
  tree tree_cst = wide_int_to_tree (type, cst);
  return get_or_create_constant_svalue (tree_cst);
}

/* Return the svalue * for the constant_svalue for the NULL pointer
   of POINTER_TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_null_ptr (tree pointer_type)
{
  gcc_assert (pointer_type);
  gcc_assert (POINTER_TYPE_P (pointer_type));
  return get_or_create_int_cst (pointer_type, 0);
}

/* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
   creating it if necessary.
   The unknown_svalue instances are reused, based on pointer equality
   of the types.  */

const svalue *
region_model_manager::get_or_create_unknown_svalue (tree type)
{
  /* Don't create unknown values when doing feasibility testing;
     instead, create a unique svalue.  */
  if (m_checking_feasibility)
    return create_unique_svalue (type);

  /* Special-case NULL, so that the hash_map can use NULL as the
     "empty" value.  */
  if (type == NULL_TREE)
    {
      if (!m_unknown_NULL)
	m_unknown_NULL = new unknown_svalue (alloc_symbol_id (), type);
      return m_unknown_NULL;
    }

  unknown_svalue **slot = m_unknowns_map.get (type);
  if (slot)
    return *slot;
  unknown_svalue *sval = new unknown_svalue (alloc_symbol_id (), type);
  m_unknowns_map.put (type, sval);
  return sval;
}

/* Return a freshly-allocated svalue of TYPE, owned by this manager.  */

const svalue *
region_model_manager::create_unique_svalue (tree type)
{
  svalue *sval = new placeholder_svalue (alloc_symbol_id (), type, "unique");
  m_managed_dynamic_svalues.safe_push (sval);
  return sval;
}

/* Return the svalue * for the initial value of REG, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_initial_value (const region *reg,
						   bool check_poisoned)
{
  if (!reg->can_have_initial_svalue_p () && check_poisoned)
    return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
					  reg->get_type ());

  /* The initial value of a cast is a cast of the initial value.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    {
      const region *original_reg = cast_reg->get_original_region ();
      return get_or_create_cast (cast_reg->get_type (),
				 get_or_create_initial_value (original_reg));
    }

  /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL.  */
  if (reg->symbolic_for_unknown_ptr_p ())
    return get_or_create_unknown_svalue (reg->get_type ());

  if (initial_svalue **slot = m_initial_values_map.get (reg))
    return *slot;
  initial_svalue *initial_sval
    = new initial_svalue (alloc_symbol_id (), reg->get_type (), reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
  m_initial_values_map.put (reg, initial_sval);
  return initial_sval;
}

/* Return the svalue * for R using type TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
						   tree type)
{
  setjmp_svalue::key_t key (r, type);
  if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
    return *slot;
  setjmp_svalue *setjmp_sval = new setjmp_svalue (r, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
  m_setjmp_values_map.put (key, setjmp_sval);
  return setjmp_sval;
}

/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
						      tree type)
{
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval
    = new poisoned_svalue (kind, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}

/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
{
  /* If this is a symbolic region from dereferencing a pointer, and the types
     match, then return the original pointer.  */
  if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
    if (ptr_type == sym_reg->get_pointer ()->get_type ())
      return sym_reg->get_pointer ();

  region_svalue::key_t key (ptr_type, pointee);
  if (region_svalue **slot = m_pointer_values_map.get (key))
    return *slot;
  region_svalue *sval
    = new region_svalue (alloc_symbol_id (), ptr_type, pointee);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
  m_pointer_values_map.put (key, sval);
  return sval;
}

/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned".  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))"
	   => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    tree inner_type = arg->get_type ();
	    if (TYPE_SIZE (type)
		&& TYPE_SIZE (inner_type)
		&& (fold_binary (LE_EXPR, boolean_type_node,
				 TYPE_SIZE (type), TYPE_SIZE (inner_type))
		    == boolean_true_node))
	      return maybe_fold_unaryop (type, op, innermost_arg);
	  }
	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    case NEGATE_EXPR:
      {
	/* -(-(VAL)) is VAL, for integer types.  */
	if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
	  if (unaryop->get_op () == NEGATE_EXPR
	      && type == unaryop->get_type ()
	      && type
	      && INTEGRAL_TYPE_P (type))
	    return unaryop->get_arg ();
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      {
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);

	/* fold_unary can return casts of constants; try to handle them.  */
	if (op != NOP_EXPR
	    && type
	    && TREE_CODE (result) == NOP_EXPR
	    && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	  {
	    const svalue *inner_cst
	      = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	    return get_or_create_cast (type,
				       get_or_create_cast (TREE_TYPE (result),
							   inner_cst));
	  }
      }

  return NULL;
}
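
/* Illustrative examples of the cast-chain folding above (assuming
   16-bit short and 32-bit int):

     (short)(int)s -> s   the outer type is no wider than the
			  intermediate one, so the intermediate cast
			  can be skipped, and the remaining cast is
			  then redundant;
     (int)(short)i        stays as-is: the intermediate truncation
			  to short is significant.  */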

/* Return the svalue * for a unary operation OP on ARG with a result of
   type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
					     const svalue *arg)
{
  if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
    return folded;
  unaryop_svalue::key_t key (type, op, arg);
  if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
    return *slot;
  unaryop_svalue *unaryop_sval
    = new unaryop_svalue (alloc_symbol_id (), type, op, arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
  m_unaryop_values_map.put (key, unaryop_sval);
  return unaryop_sval;
}

/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
   Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
   of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
   and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
   on.  */

static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
  gcc_assert (dst_type);
  if (!src_type)
    return NOP_EXPR;

  if (SCALAR_FLOAT_TYPE_P (src_type))
    {
      if (TREE_CODE (dst_type) == INTEGER_TYPE)
	return FIX_TRUNC_EXPR;
      else
	return VIEW_CONVERT_EXPR;
    }

  return NOP_EXPR;
}

/* Return the svalue * for a cast of ARG to type TYPE, creating it
   if necessary.  */

const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
  gcc_assert (type);

  /* No-op if the types are the same.  */
  if (type == arg->get_type ())
    return arg;

  /* Don't attempt to handle casts involving vector types for now.  */
  if (VECTOR_TYPE_P (type)
      || (arg->get_type ()
	  && VECTOR_TYPE_P (arg->get_type ())))
    return get_or_create_unknown_svalue (type);

  enum tree_code op = get_code_for_cast (type, arg->get_type ());
  return get_or_create_unaryop (type, op, arg);
}
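
/* Dispatch examples for the above (illustrative): a float-typed svalue
   cast to an INTEGER_TYPE goes via FIX_TRUNC_EXPR; a float-typed svalue
   cast to any other type (including another float type) goes via
   VIEW_CONVERT_EXPR; everything else, including pointer/integer
   conversions, uses NOP_EXPR.  */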

/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return NULL.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  if (type != unsigned_char_type_node)
    return NULL;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return NULL;

  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return NULL;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}
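
/* Illustrative scenario for the above (bit-field layout is
   target-dependent, so this is a sketch rather than exact GIMPLE):
   given

     struct S { unsigned char a : 2; unsigned char b : 3; };

   optimize_bit_field_compare may lower "s.b == 3" to something like

     ((unsigned char)s & 0x1C) == (3 << 2)

   Here the mask 0x1C covers bits 2-4; if the compound_svalue for "s"
   has a binding for exactly those bits, we recover it, shift it back
   into position, and re-mask it so the comparison can be evaluated
   symbolically.  */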

/* Subroutine of region_model_manager::get_or_create_binop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
					const svalue *arg0,
					const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (tree result = fold_binary (op, type, cst0, cst1))
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);
    }

  if ((type && FLOAT_TYPE_P (type))
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      /* (0 - VAL) -> -VAL.  */
      if (cst0 && zerop (cst0))
	return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
      /* (X + Y) - X -> Y.  */
      if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
	if (binop->get_op () == PLUS_EXPR)
	  if (binop->get_arg0 () == arg1)
	    return get_or_create_cast (type, binop->get_arg1 ());
      break;
    case MULT_EXPR:
      /* (VAL * 0) -> 0.  */
      if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
	return get_or_create_constant_svalue (build_int_cst (type, 0));
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
	/* TODO: we ought to have a cast to TYPE here, but doing so introduces
	   regressions; see PR analyzer/110902.  */
	return arg0;
      break;
    case BIT_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 & 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));

	  if (const compound_svalue *compound_sval
		= arg0->dyn_cast_compound_svalue ())
	    if (const svalue *sval
		  = maybe_undo_optimize_bit_field_compare (type,
							   compound_sval,
							   cst1, arg1))
	      return sval;
	}
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then...  */
	  /* ..."(1 & x) -> x".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x & 1) -> x".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_cast (type, arg0);
	  /* ..."(0 & x) -> 0".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_int_cst (type, 0);
	  /* ..."(x & 0) -> 0".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_int_cst (type, 0);
	}
      break;
    case BIT_IOR_EXPR:
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then...  */
	  /* ..."(1 | x) -> 1".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(x | 1) -> 1".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(0 | x) -> x".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x | 0) -> x".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 && 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));
	  else
	    /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
	{
	  if (zerop (cst1))
	    /* "(ARG0 || 0)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	  else
	    /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
	    return get_or_create_cast (type, arg1);
	}
      break;
    }

  /* For associative ops, fold "((X op CST_A) op CST_B)" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
	  && binop->get_arg1 ()->maybe_get_constant ())
	return get_or_create_binop
	  (type, op, binop->get_arg0 (),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "((PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
	if (binop->get_arg1 ()->maybe_get_constant ())
	  return get_or_create_binop
	    (type, op, binop->get_arg0 (),
	     get_or_create_binop (size_type_node, op,
				  binop->get_arg1 (), arg1));

  /* Distribute multiplication by a constant through addition/subtraction:
     (X + Y) * CST => (X * CST) + (Y * CST).  */
  if (cst1 && op == MULT_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == PLUS_EXPR
	  || binop->get_op () == MINUS_EXPR)
	{
	  return get_or_create_binop
	    (type, binop->get_op (),
	     get_or_create_binop (type, op,
				  binop->get_arg0 (), arg1),
	     get_or_create_binop (type, op,
				  binop->get_arg1 (), arg1));
	}

  /* etc.  */

  return NULL;
}

/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     it via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval
    = new binop_svalue (alloc_symbol_id (), type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}
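
/* Canonicalization example (illustrative): for "(3 + x) + 4", the
   commutative swap above first rewrites the inner binop as "x + 3";
   the associativity rule in maybe_fold_binop then combines the two
   constants, so the result is consolidated as the single binop_svalue
   "x + 7".  */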

/* Subroutine of region_model_manager::get_or_create_sub_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
	= parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      {
	/* If we have a concrete 1-byte access within the parent region... */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the STRING_CST.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		  = maybe_get_char_from_string_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  if (const initial_svalue *init_sval
	= parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r))[ELEMENT] -> INIT(r[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg
	    = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  if (const repeated_svalue *repeated_sval
	= parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  return NULL;
}

/* Return the svalue * for extracting a subvalue of type TYPE from
   PARENT_SVALUE based on SUBREGION, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  if (const svalue *folded
	= maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (alloc_symbol_id (), type, parent_svalue, subregion);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}

/* Subroutine of region_model_manager::get_or_create_repeated_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
    {
      HOST_WIDE_INT num_bytes_inner_svalue
	= int_size_in_bytes (inner_svalue->get_type ());
      if (num_bytes_inner_svalue != -1)
	if (num_bytes_inner_svalue
	    == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Handle zero-fill of a specific type.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  return NULL;
}

/* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
   enough times to be of size OUTER_SIZE, creating it if necessary.
   e.g. for filling buffers with a constant value.  */

const svalue *
region_model_manager::get_or_create_repeated_svalue (tree type,
						     const svalue *outer_size,
						     const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
    return folded;

  repeated_svalue::key_t key (type, outer_size, inner_svalue);
  if (repeated_svalue **slot = m_repeated_values_map.get (key))
    return *slot;
  repeated_svalue *repeated_sval
    = new repeated_svalue (alloc_symbol_id (), type, outer_size, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
  m_repeated_values_map.put (key, repeated_sval);
  return repeated_sval;
}
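
/* For example (illustrative): modeling "memset (&i, 0, sizeof (i))"
   for an "int i" can bind a repeated_svalue (a one-byte zero repeated
   four times) to i's region; the zero-fill case above then lets a
   subsequent read of i fold to "(int)0" rather than remaining
   symbolic.  */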

/* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_bit_range_for_field (tree field, bit_range *out)
{
  bit_size_t bit_size;
  if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
    return false;
  int field_bit_offset = int_bit_position (field);
  *out = bit_range (field_bit_offset, bit_size);
  return true;
}

/* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_byte_range_for_field (tree field, byte_range *out)
{
  bit_range field_bits (0, 0);
  if (!get_bit_range_for_field (field, &field_bits))
    return false;
  return field_bits.as_byte_range (out);
}

/* Attempt to determine if there is a specific field within RECORD_TYPE
   at BYTES.  If so, return it, and write the location of BYTES relative
   to the field to *OUT_RANGE_WITHIN_FIELD.
   Otherwise, return NULL_TREE.
   For example, given:
     struct foo { uint32 a; uint32 b; };
   and
     bytes = {bytes 6-7} (of foo)
   we have bytes 2-3 of field b.  */

static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  byte_range field_bytes (0,0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within field_bytes?  */
  byte_range bytes_within_field (0,0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  *out_range_within_field = bytes_within_field;
  return field;
}

/* Subroutine of region_model_manager::get_or_create_bits_within.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN (0, sizeof (VAL), VAL)
     to:
       CAST(TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding.  */
  if (const svalue *sval
	= inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  byte_range bytes (0,0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if range is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
			= inner_svalue->dyn_cast_initial_svalue ())
		    {
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if range is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		    = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  return NULL;
}
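
/* Worked example for the ARRAY_TYPE case above (illustrative): given
   "int arr[10]" with 4-byte int, extracting bits 32-63 of
   INIT_VAL (arr) lands entirely within element 1, so it becomes
   BITS_WITHIN (bits 0-31, INIT_VAL (arr[1])), which the first fold in
   this function then reduces to a cast of INIT_VAL (arr[1]).  */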

/* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_bits_within (tree type,
						 const bit_range &bits,
						 const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_bits_within_svalue (type, bits, inner_svalue))
    return folded;

  bits_within_svalue::key_t key (type, bits, inner_svalue);
  if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
    return *slot;
  bits_within_svalue *bits_within_sval
    = new bits_within_svalue (alloc_symbol_id (), type, bits, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
  m_bits_within_values_map.put (key, bits_within_sval);
  return bits_within_sval;
}

/* Return the svalue * that decorates ARG as being unmergeable,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unmergeable (const svalue *arg)
{
  if (arg->get_kind () == SK_UNMERGEABLE)
    return arg;

  if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
    return *slot;
  unmergeable_svalue *unmergeable_sval
    = new unmergeable_svalue (alloc_symbol_id (), arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
  m_unmergeable_values_map.put (arg, unmergeable_sval);
  return unmergeable_sval;
}

/* Return the svalue * of type TYPE for the merger of value BASE_SVAL
   and ITER_SVAL at POINT, creating it if necessary.  */

const svalue *
region_model_manager::
get_or_create_widening_svalue (tree type,
			       const function_point &point,
			       const svalue *base_sval,
			       const svalue *iter_sval)
{
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  widening_svalue::key_t key (type, point, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (alloc_symbol_id (), type, point, base_sval,
			   iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}

/* Return the svalue * of type TYPE for the compound values in MAP,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const binding_map &map)
{
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (alloc_symbol_id (), type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}

/* class conjured_purge.  */

/* Purge state relating to SVAL.  */

void
conjured_purge::purge (const conjured_svalue *sval) const
{
  m_model->purge_state_involving (sval, m_ctxt);
}

/* Return the svalue * of type TYPE for the value conjured for ID_REG
   at STMT, creating it if necessary.
   Use P to purge existing state from the svalue, for the case where a
   conjured_svalue would be reused along an execution path.  */

const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg,
						     const conjured_purge &p)
{
  conjured_svalue::key_t key (type, stmt, id_reg);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    {
      const conjured_svalue *sval = *slot;
      /* We're reusing an existing conjured_svalue, perhaps from a different
	 state within this analysis, or perhaps from an earlier state on this
	 execution path.  For the latter, purge any state involving the "new"
	 svalue from the current program_state.  */
      p.purge (sval);
      return sval;
    }
  conjured_svalue *conjured_sval
    = new conjured_svalue (alloc_symbol_id (), type, stmt, id_reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}
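
/* For example (illustrative): a value written through an out-param by
   a call to an unknown function can be modeled as a conjured_svalue
   keyed by (type, call stmt, region).  If the analysis reaches the
   same stmt again along one execution path (e.g. within a loop), the
   same conjured_svalue is returned, and P first purges any sm-state
   involving the stale value.  */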

/* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::
maybe_fold_asm_output_svalue (tree type,
			      const vec<const svalue *> &inputs)
{
  /* Unknown inputs should lead to unknown results.  */
  for (const auto &iter : inputs)
    if (iter->get_kind () == SK_UNKNOWN)
      return get_or_create_unknown_svalue (type);

  return NULL;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
   asm stmt ASM_STMT, given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     noutputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
   asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
   INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const char *asm_string,
				 unsigned output_idx,
				 unsigned num_outputs,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     num_outputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for the result of a call to FNDECL
   with __attribute__((const)), given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_const_fn_result_svalue (tree type,
				      tree fndecl,
				      const vec<const svalue *> &inputs)
{
  gcc_assert (type);
  gcc_assert (fndecl);
  gcc_assert (DECL_P (fndecl));
  gcc_assert (TREE_READONLY (fndecl));
  gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);

  const_fn_result_svalue::key_t key (type, fndecl, inputs);
  if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
    return *slot;
  const_fn_result_svalue *const_fn_result_sval
    = new const_fn_result_svalue (alloc_symbol_id (), type, fndecl, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
  m_const_fn_result_values_map.put (key, const_fn_result_sval);
  return const_fn_result_sval;
}

/* Given STRING_CST (a STRING_CST) and BYTE_OFFSET_CST (a constant),
   attempt to get the character at that offset, returning either
   the svalue for the character constant, or NULL if unsuccessful.  */

const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && compare_tree_int (byte_offset_cst,
			   TREE_STRING_LENGTH (string_cst)) < 0
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
			      (TREE_STRING_POINTER (string_cst)
			       [TREE_INT_CST_LOW (byte_offset_cst)]));
      return get_or_create_constant_svalue (char_cst);
    }
  return NULL;
}
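
/* For example (illustrative): given the STRING_CST "foo" (4 bytes
   including the trailing NUL) and byte offset 1, this returns the
   constant_svalue for 'o'; offsets at or beyond TREE_STRING_LENGTH
   yield NULL.  */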

/* region consolidation.  */

/* Return the region for FNDECL, creating it if necessary.  */

const function_region *
region_model_manager::get_region_for_fndecl (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);

  function_region **slot = m_fndecls_map.get (fndecl);
  if (slot)
    return *slot;
  function_region *reg
    = new function_region (alloc_symbol_id (), &m_code_region, fndecl);
  m_fndecls_map.put (fndecl, reg);
  return reg;
}

/* Return the region for LABEL, creating it if necessary.  */

const label_region *
region_model_manager::get_region_for_label (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  label_region **slot = m_labels_map.get (label);
  if (slot)
    return *slot;

  tree fndecl = DECL_CONTEXT (label);
  gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);

  const function_region *func_reg = get_region_for_fndecl (fndecl);
  label_region *reg
    = new label_region (alloc_symbol_id (), func_reg, label);
  m_labels_map.put (label, reg);
  return reg;
}

/* Return the region for EXPR, creating it if necessary.  */

const decl_region *
region_model_manager::get_region_for_global (tree expr)
{
  gcc_assert (VAR_P (expr));

  decl_region **slot = m_globals_map.get (expr);
  if (slot)
    return *slot;
  decl_region *reg
    = new decl_region (alloc_symbol_id (), &m_globals_region, expr);
  m_globals_map.put (expr, reg);
  return reg;
}

/* Return the region for an unknown access of type REGION_TYPE,
   creating it if necessary.
   This is a symbolic_region, where the pointer is an unknown_svalue
   of type &REGION_TYPE.  */

const region *
region_model_manager::get_unknown_symbolic_region (tree region_type)
{
  tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
  const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
  return get_symbolic_region (unknown_ptr);
}

/* Return the region that describes accessing field FIELD of PARENT,
   creating it if necessary.  */

const region *
region_model_manager::get_field_region (const region *parent, tree field)
{
  gcc_assert (TREE_CODE (field) == FIELD_DECL);

  /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (TREE_TYPE (field));

  field_region::key_t key (parent, field);
  if (field_region *reg = m_field_regions.get (key))
    return reg;

  field_region *field_reg
    = new field_region (alloc_symbol_id (), parent, field);
  m_field_regions.put (key, field_reg);
  return field_reg;
}

/* Return the region that describes accessing the element of type
   ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary.  */

const region *
region_model_manager::get_element_region (const region *parent,
					  tree element_type,
					  const svalue *index)
{
  /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (element_type);

  element_region::key_t key (parent, element_type, index);
  if (element_region *reg = m_element_regions.get (key))
    return reg;

  element_region *element_reg
    = new element_region (alloc_symbol_id (), parent, element_type, index);
  m_element_regions.put (key, element_reg);
  return element_reg;
}

/* Return the region that describes accessing the subregion of type
   TYPE at offset BYTE_OFFSET within PARENT, creating it if
   necessary.  */

const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* If BYTE_OFFSET is zero, return PARENT, cast to TYPE.  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to OFFSET_REGION(REG, (X + Y)).  */
  if (const offset_region *parent_offset_reg
	= parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_symbol_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}
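
/* Folding example (illustrative): for "char *q = p + 2;" followed by
   "char *r = q + 3;", the rule above makes the region for "*r"
   OFFSET_REGION (*p, 5), rather than an OFFSET_REGION nested inside
   another OFFSET_REGION.  */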

/* Return the region that describes accessing the subregion of type
   TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary.  */

const region *
region_model_manager::get_sized_region (const region *parent,
					tree type,
					const svalue *byte_size_sval)
{
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  if (byte_size_sval->get_type () != size_type_node)
    byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);

  /* If PARENT is already that size, return it.  */
  const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
  if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
    if (tree size_cst = byte_size_sval->maybe_get_constant ())
      {
	tree comparison
	  = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
	if (comparison == boolean_true_node)
	  return parent;
      }

  sized_region::key_t key (parent, type, byte_size_sval);
  if (sized_region *reg = m_sized_regions.get (key))
    return reg;

  sized_region *sized_reg
    = new sized_region (alloc_symbol_id (), parent, type, byte_size_sval);
  m_sized_regions.put (key, sized_reg);
  return sized_reg;
}

/* Return the region that describes accessing ORIGINAL_REGION as if
   it were of type TYPE, creating it if necessary.  */

const region *
region_model_manager::get_cast_region (const region *original_region,
				       tree type)
{
  /* If types match, return ORIGINAL_REGION.  */
  if (type == original_region->get_type ())
    return original_region;

  if (original_region->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  cast_region::key_t key (original_region, type);
  if (cast_region *reg = m_cast_regions.get (key))
    return reg;

  cast_region *cast_reg
    = new cast_region (alloc_symbol_id (), original_region, type);
  m_cast_regions.put (key, cast_reg);
  return cast_reg;
}

/* Return the frame_region for call to FUN from CALLING_FRAME, creating it
   if necessary.  CALLING_FRAME may be NULL.  */

const frame_region *
region_model_manager::get_frame_region (const frame_region *calling_frame,
					function *fun)
{
  int index = calling_frame ? calling_frame->get_index () + 1 : 0;

  frame_region::key_t key (calling_frame, fun);
  if (frame_region *reg = m_frame_regions.get (key))
    return reg;

  frame_region *frame_reg
    = new frame_region (alloc_symbol_id (), &m_stack_region, calling_frame,
			fun, index);
  m_frame_regions.put (key, frame_reg);
  return frame_reg;
}
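
/* Illustrative sketch (comment only; "mgr", "fun_main" and "fun_foo"
   are hypothetical "function *" values).  Frame indices grow with call
   depth, and repeated lookups are consolidated:

     const frame_region *main_frame
       = mgr->get_frame_region (NULL, fun_main);      // index 0
     const frame_region *foo_frame
       = mgr->get_frame_region (main_frame, fun_foo); // index 1

   Calling get_frame_region again with the same (calling_frame, fun)
   pair returns the same object.  */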

/* Return the region that describes dereferencing SVAL, creating it
   if necessary.  */

const region *
region_model_manager::get_symbolic_region (const svalue *sval)
{
  symbolic_region::key_t key (&m_root_region, sval);
  if (symbolic_region *reg = m_symbolic_regions.get (key))
    return reg;

  symbolic_region *symbolic_reg
    = new symbolic_region (alloc_symbol_id (), &m_root_region, sval);
  m_symbolic_regions.put (key, symbolic_reg);
  return symbolic_reg;
}

/* Return the region that describes accessing STRING_CST, creating it
   if necessary.  */

const string_region *
region_model_manager::get_region_for_string (tree string_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  string_region **slot = m_string_map.get (string_cst);
  if (slot)
    return *slot;
  string_region *reg
    = new string_region (alloc_symbol_id (), &m_root_region, string_cst);
  m_string_map.put (string_cst, reg);
  return reg;
}

/* Return the region that describes accessing BITS within PARENT as TYPE,
   creating it if necessary.  */

const region *
region_model_manager::get_bit_range (const region *parent, tree type,
				     const bit_range &bits)
{
  gcc_assert (parent);

  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  bit_range_region::key_t key (parent, type, bits);
  if (bit_range_region *reg = m_bit_range_regions.get (key))
    return reg;

  bit_range_region *bit_range_reg
    = new bit_range_region (alloc_symbol_id (), parent, type, bits);
  m_bit_range_regions.put (key, bit_range_reg);
  return bit_range_reg;
}
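
/* Illustrative sketch (comment only; "mgr" and "int_reg" are
   hypothetical).  To describe reading the low byte of a 32-bit int
   region as an unsigned char:

     bit_range low_byte (0, BITS_PER_UNIT);
     const region *reg
       = mgr->get_bit_range (int_reg, unsigned_char_type_node, low_byte);

   Identical (parent, type, bits) triples map to the same region.  */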

/* Return the region that describes accessing the IDX-th variadic argument
   within PARENT_FRAME, creating it if necessary.  */

const var_arg_region *
region_model_manager::get_var_arg_region (const frame_region *parent_frame,
					  unsigned idx)
{
  gcc_assert (parent_frame);

  var_arg_region::key_t key (parent_frame, idx);
  if (var_arg_region *reg = m_var_arg_regions.get (key))
    return reg;

  var_arg_region *var_arg_reg
    = new var_arg_region (alloc_symbol_id (), parent_frame, idx);
  m_var_arg_regions.put (key, var_arg_reg);
  return var_arg_reg;
}

/* If we see a tree code we don't know how to handle, rather than
   ICE or generate bogus results, create a dummy region, and notify
   CTXT so that it can mark the new state as being not properly
   modelled.  The exploded graph can then stop exploring that path,
   since any diagnostics we might issue will have questionable
   validity.  */

const region *
region_model_manager::
get_region_for_unexpected_tree_code (region_model_context *ctxt,
				     tree t,
				     const dump_location_t &loc)
{
  tree type = TYPE_P (t) ? t : TREE_TYPE (t);
  region *new_reg
    = new unknown_region (alloc_symbol_id (), &m_root_region, type);
  if (ctxt)
    ctxt->on_unexpected_tree_code (t, loc);
  return new_reg;
}
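
/* Note that, unlike the getters above, these dummy regions are not
   consolidated: each call creates a fresh unknown_region.  That seems
   acceptable given that, per the comment above, paths reaching here
   stop being explored.  */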

/* Return a region describing a heap-allocated block of memory.
   Reuse an existing heap_allocated_region if its id is not within
   BASE_REGS_IN_USE.  */

const region *
region_model_manager::
get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use)
{
  /* Try to reuse an existing region, if it's unreferenced in the
     client state.  */
  for (auto existing_reg : m_managed_dynamic_regions)
    if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
      if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
	return existing_reg;

  /* All existing ones (if any) are in use; create a new one.  */
  region *reg
    = new heap_allocated_region (alloc_symbol_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}
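
/* Illustrative sketch (comment only; assumes a freshly-constructed
   manager "mgr").  With an empty in-use bitmap the same region is
   recycled:

     auto_bitmap in_use;  // no base region ids marked as live
     const region *r1 = mgr->get_or_create_region_for_heap_alloc (in_use);
     const region *r2 = mgr->get_or_create_region_for_heap_alloc (in_use);
     // r1 == r2: r1's id is absent from in_use, so it is reused.

   Setting r1's id in the bitmap before the second call would instead
   force allocation of a distinct heap_allocated_region.  */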

/* Return a new region describing a block of memory allocated within FRAME.  */

const region *
region_model_manager::create_region_for_alloca (const frame_region *frame)
{
  gcc_assert (frame);
  region *reg = new alloca_region (alloc_symbol_id (), frame);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}
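
/* Unlike get_or_create_region_for_heap_alloc, there is no reuse here:
   every call yields a distinct alloca_region, reflecting that each
   execution of an alloca call produces separate storage within its
   frame.  */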

/* Log OBJ to LOGGER.  */

template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, " ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}

/* Specialization for frame_region, which also logs the count of locals
   managed by the frame_region.  */

template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, " ");
  obj->dump_to_pp (pp, true);
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}

/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  logger->log (" # %s: %li", title, (long)uniq_map.elements ());
  if (!show_objs)
    return;
  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects that were managed by MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const consolidation_map<T> &map)
{
  logger->log (" # %s: %li", title, (long)map.elements ());
  if (!show_objs)
    return;

  auto_vec<const T *> vec_objs (map.elements ());
  for (typename consolidation_map<T>::iterator iter = map.begin ();
       iter != map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("call string consolidation");
  m_empty_call_string.recursive_log (logger);
  logger->log ("next symbol id: %i", m_next_symbol_id);
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  logger->log ("region consolidation");
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
  logger->log (" # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}

/* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
   (using -fdump-analyzer-untracked).  */

static void
dump_untracked_region (const decl_region *decl_reg)
{
  tree decl = decl_reg->get_decl ();
  if (TREE_CODE (decl) != VAR_DECL)
    return;
  /* For now, don't emit the status of decls in the constant pool, to avoid
     differences in DejaGnu test results between targets that use these vs
     those that don't.
     (Eventually these decls should probably be untracked and we should test
     for that, but that's not stage 4 material).  */
  if (DECL_IN_CONSTANT_POOL (decl))
    return;
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
	      "track %qD: %s",
	      decl, (decl_reg->tracked_p () ? "yes" : "no"));
}
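
/* For example, for a tracked global "g", the output of
   -fdump-analyzer-untracked should look roughly like:

     warning: track 'g': yes

   (using the warning machinery is just a convenient way to get
   per-decl lines that DejaGnu tests can match).  */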

/* Implementation of -fdump-analyzer-untracked.  */

void
region_model_manager::dump_untracked_regions () const
{
  for (auto iter : m_globals_map)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
  for (auto frame_iter : m_frame_regions)
    {
      const frame_region *frame_reg = frame_iter.second;
      frame_reg->dump_untracked_regions ();
    }
}

void
frame_region::dump_untracked_regions () const
{
  for (auto iter : m_locals)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */