]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/analyzer/region-model-manager.cc
Update copyright years.
[thirdparty/gcc.git] / gcc / analyzer / region-model-manager.cc
CommitLineData
808f4dfe 1/* Consolidation of svalues and regions.
a945c346 2 Copyright (C) 2020-2024 Free Software Foundation, Inc.
808f4dfe
DM
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
6341f14e 22#define INCLUDE_MEMORY
808f4dfe
DM
23#include "system.h"
24#include "coretypes.h"
25#include "tree.h"
26#include "diagnostic-core.h"
27#include "gimple-pretty-print.h"
28#include "function.h"
29#include "basic-block.h"
30#include "gimple.h"
31#include "gimple-iterator.h"
32#include "diagnostic-core.h"
33#include "graphviz.h"
34#include "options.h"
35#include "cgraph.h"
36#include "tree-dfa.h"
37#include "stringpool.h"
38#include "convert.h"
39#include "target.h"
40#include "fold-const.h"
41#include "tree-pretty-print.h"
808f4dfe 42#include "bitmap.h"
808f4dfe
DM
43#include "analyzer/analyzer.h"
44#include "analyzer/analyzer-logging.h"
45#include "ordered-hash-map.h"
46#include "options.h"
808f4dfe
DM
47#include "analyzer/supergraph.h"
48#include "sbitmap.h"
49#include "analyzer/call-string.h"
50#include "analyzer/program-point.h"
51#include "analyzer/store.h"
52#include "analyzer/region-model.h"
8ca7fa84 53#include "analyzer/constraint-manager.h"
808f4dfe
DM
54
55#if ENABLE_ANALYZER
56
57namespace ana {
58
59/* class region_model_manager. */
60
61/* region_model_manager's ctor. */
62
11a2ff8d
DM
region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_next_symbol_id (0),
  m_empty_call_string (),
  /* The root region is the ancestor of all other regions; the special
     memory spaces (stack, heap, code, globals, thread-locals) hang
     directly off it.  */
  m_root_region (alloc_symbol_id ()),
  m_stack_region (alloc_symbol_id (), &m_root_region),
  m_heap_region (alloc_symbol_id (), &m_root_region),
  /* Created lazily in get_or_create_unknown_svalue; NULL doubles as the
     hash_map "empty" value there.  */
  m_unknown_NULL (NULL),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_symbol_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_symbol_id (), &m_root_region),
  m_globals_map (),
  m_thread_local_region (alloc_symbol_id (), &m_root_region),
  /* errno is modeled per-thread, so its region is parented by the
     thread-local region rather than the root region.  */
  m_errno_region (alloc_symbol_id (), &m_thread_local_region),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}
84
85/* region_model_manager's dtor. Delete all of the managed svalues
86 and regions. */
87
88region_model_manager::~region_model_manager ()
89{
90 /* Delete consolidated svalues. */
91 for (constants_map_t::iterator iter = m_constants_map.begin ();
92 iter != m_constants_map.end (); ++iter)
93 delete (*iter).second;
94 for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
95 iter != m_unknowns_map.end (); ++iter)
96 delete (*iter).second;
97 delete m_unknown_NULL;
808f4dfe
DM
98 for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
99 iter != m_poisoned_values_map.end (); ++iter)
100 delete (*iter).second;
99988b0e
DM
101 for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
102 iter != m_setjmp_values_map.end (); ++iter)
103 delete (*iter).second;
808f4dfe
DM
104 for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
105 iter != m_initial_values_map.end (); ++iter)
106 delete (*iter).second;
107 for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
108 iter != m_pointer_values_map.end (); ++iter)
109 delete (*iter).second;
110 for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
111 iter != m_unaryop_values_map.end (); ++iter)
112 delete (*iter).second;
113 for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
114 iter != m_binop_values_map.end (); ++iter)
115 delete (*iter).second;
116 for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
117 iter != m_sub_values_map.end (); ++iter)
118 delete (*iter).second;
99988b0e
DM
119 for (auto iter : m_repeated_values_map)
120 delete iter.second;
121 for (auto iter : m_bits_within_values_map)
122 delete iter.second;
808f4dfe
DM
123 for (unmergeable_values_map_t::iterator iter
124 = m_unmergeable_values_map.begin ();
125 iter != m_unmergeable_values_map.end (); ++iter)
126 delete (*iter).second;
127 for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
128 iter != m_widening_values_map.end (); ++iter)
129 delete (*iter).second;
130 for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
131 iter != m_compound_values_map.end (); ++iter)
132 delete (*iter).second;
133 for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
134 iter != m_conjured_values_map.end (); ++iter)
135 delete (*iter).second;
99988b0e
DM
136 for (auto iter : m_asm_output_values_map)
137 delete iter.second;
138 for (auto iter : m_const_fn_result_values_map)
139 delete iter.second;
808f4dfe
DM
140
141 /* Delete consolidated regions. */
142 for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
143 iter != m_fndecls_map.end (); ++iter)
144 delete (*iter).second;
145 for (labels_map_t::iterator iter = m_labels_map.begin ();
146 iter != m_labels_map.end (); ++iter)
147 delete (*iter).second;
148 for (globals_map_t::iterator iter = m_globals_map.begin ();
149 iter != m_globals_map.end (); ++iter)
150 delete (*iter).second;
151 for (string_map_t::iterator iter = m_string_map.begin ();
152 iter != m_string_map.end (); ++iter)
153 delete (*iter).second;
8ca7fa84
DM
154
155 delete m_range_mgr;
808f4dfe
DM
156}
157
158/* Return true if C exceeds the complexity limit for svalues. */
159
160bool
161region_model_manager::too_complex_p (const complexity &c) const
162{
163 if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
164 return true;
165 return false;
166}
167
168/* If SVAL exceeds the complexity limit for svalues, delete it
169 and return true.
170 Otherwise update m_max_complexity and return false. */
171
bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  /* Complexity limits are disabled while checking feasibility.  */
  if (m_checking_feasibility)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      /* Track high-water marks of the complexity we've accepted.  */
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
        m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
        m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  /* Too complex: warn (with a dump of the offending symbol), then
     discard SVAL.  The caller is expected to substitute an "unknown"
     svalue (see RETURN_UNKNOWN_IF_TOO_COMPLEX).  */
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  sval->dump_to_pp (&pp, true);
  if (warning_at (input_location, OPT_Wanalyzer_symbol_too_complex,
                  "symbol too complicated: %qs",
                  pp_formatted_text (&pp)))
    inform (input_location,
            "max_depth %i exceeds --param=analyzer-max-svalue-depth=%i",
            c.m_max_depth, param_analyzer_max_svalue_depth);

  delete sval;
  return true;
}
201
202/* Macro for imposing a complexity limit on svalues, for use within
203 region_model_manager member functions.
204
205 If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
206 value of the same type.
207 Otherwise update m_max_complexity and carry on. */
208
/* Note: the type is captured *before* calling reject_if_too_complex,
   since that call may delete SVAL; the saved type is then used to build
   the replacement "unknown" svalue.  */
#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL) \
  do { \
    svalue *sval_ = (SVAL); \
    tree type_ = sval_->get_type (); \
    if (reject_if_too_complex (sval_)) \
      return get_or_create_unknown_svalue (type_); \
  } while (0)
216
217/* svalue consolidation. */
218
219/* Return the svalue * for a constant_svalue for CST_EXPR,
220 creating it if necessary.
221 The constant_svalue instances are reused, based on pointer equality
222 of trees */
223
224const svalue *
225region_model_manager::get_or_create_constant_svalue (tree cst_expr)
226{
227 gcc_assert (cst_expr);
2aefe248 228 gcc_assert (CONSTANT_CLASS_P (cst_expr));
808f4dfe
DM
229
230 constant_svalue **slot = m_constants_map.get (cst_expr);
231 if (slot)
232 return *slot;
9d804f9b
DM
233 constant_svalue *cst_sval
234 = new constant_svalue (alloc_symbol_id (), cst_expr);
808f4dfe
DM
235 RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
236 m_constants_map.put (cst_expr, cst_sval);
237 return cst_sval;
238}
239
1aff29d4
DM
240/* Return the svalue * for a constant_svalue for the INTEGER_CST
241 for VAL of type TYPE, creating it if necessary. */
242
243const svalue *
0e466e97
DM
244region_model_manager::get_or_create_int_cst (tree type,
245 const poly_wide_int_ref &cst)
1aff29d4
DM
246{
247 gcc_assert (type);
b86c0fe3 248 gcc_assert (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type));
0e466e97 249 tree tree_cst = wide_int_to_tree (type, cst);
1aff29d4
DM
250 return get_or_create_constant_svalue (tree_cst);
251}
252
dcfc7ac9
DM
253/* Return the svalue * for the constant_svalue for the NULL pointer
254 of POINTER_TYPE, creating it if necessary. */
255
const svalue *
region_model_manager::get_or_create_null_ptr (tree pointer_type)
{
  gcc_assert (pointer_type);
  gcc_assert (POINTER_TYPE_P (pointer_type));
  /* NULL is simply the zero constant of the given pointer type.  */
  return get_or_create_int_cst (pointer_type, 0);
}
263
808f4dfe
DM
264/* Return the svalue * for a unknown_svalue for TYPE (which can be NULL),
265 creating it if necessary.
266 The unknown_svalue instances are reused, based on pointer equality
267 of the types */
268
269const svalue *
270region_model_manager::get_or_create_unknown_svalue (tree type)
271{
4f34f8cc
DM
272 /* Don't create unknown values when doing feasibility testing;
273 instead, create a unique svalue. */
274 if (m_checking_feasibility)
275 return create_unique_svalue (type);
276
808f4dfe
DM
277 /* Special-case NULL, so that the hash_map can use NULL as the
278 "empty" value. */
279 if (type == NULL_TREE)
280 {
281 if (!m_unknown_NULL)
9d804f9b 282 m_unknown_NULL = new unknown_svalue (alloc_symbol_id (), type);
808f4dfe
DM
283 return m_unknown_NULL;
284 }
285
286 unknown_svalue **slot = m_unknowns_map.get (type);
287 if (slot)
288 return *slot;
9d804f9b 289 unknown_svalue *sval = new unknown_svalue (alloc_symbol_id (), type);
808f4dfe
DM
290 m_unknowns_map.put (type, sval);
291 return sval;
292}
293
4f34f8cc
DM
294/* Return a freshly-allocated svalue of TYPE, owned by this manager. */
295
const svalue *
region_model_manager::create_unique_svalue (tree type)
{
  /* A placeholder_svalue gives a distinct value on every call (no
     consolidation); ownership is retained via m_managed_dynamic_svalues,
     since the value isn't in any of the consolidation maps.  */
  svalue *sval = new placeholder_svalue (alloc_symbol_id (), type, "unique");
  m_managed_dynamic_svalues.safe_push (sval);
  return sval;
}
303
808f4dfe
DM
304/* Return the svalue * for the initial value of REG, creating it if
305 necessary. */
306
307const svalue *
1eb90f46 308region_model_manager::get_or_create_initial_value (const region *reg,
309 bool check_poisoned)
808f4dfe 310{
1eb90f46 311 if (!reg->can_have_initial_svalue_p () && check_poisoned)
33255ad3
DM
312 return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
313 reg->get_type ());
314
808f4dfe
DM
315 /* The initial value of a cast is a cast of the initial value. */
316 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
317 {
318 const region *original_reg = cast_reg->get_original_region ();
319 return get_or_create_cast (cast_reg->get_type (),
320 get_or_create_initial_value (original_reg));
321 }
322
46cb27e5
DM
323 /* Simplify:
324 INIT_VAL(ELEMENT_REG(STRING_REG), CONSTANT_SVAL)
325 to:
326 CONSTANT_SVAL(STRING[N]). */
327 if (const element_region *element_reg = reg->dyn_cast_element_region ())
328 if (tree cst_idx = element_reg->get_index ()->maybe_get_constant ())
329 if (const string_region *string_reg
330 = element_reg->get_parent_region ()->dyn_cast_string_region ())
331 if (tree_fits_shwi_p (cst_idx))
332 {
333 HOST_WIDE_INT idx = tree_to_shwi (cst_idx);
334 tree string_cst = string_reg->get_string_cst ();
335 if (idx >= 0 && idx <= TREE_STRING_LENGTH (string_cst))
336 {
337 int ch = TREE_STRING_POINTER (string_cst)[idx];
338 return get_or_create_int_cst (reg->get_type (), ch);
339 }
340 }
341
11d4ec5d
DM
342 /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL. */
343 if (reg->symbolic_for_unknown_ptr_p ())
344 return get_or_create_unknown_svalue (reg->get_type ());
345
808f4dfe
DM
346 if (initial_svalue **slot = m_initial_values_map.get (reg))
347 return *slot;
9d804f9b
DM
348 initial_svalue *initial_sval
349 = new initial_svalue (alloc_symbol_id (), reg->get_type (), reg);
808f4dfe
DM
350 RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
351 m_initial_values_map.put (reg, initial_sval);
352 return initial_sval;
353}
354
355/* Return the svalue * for R using type TYPE, creating it if
356 necessary. */
357
358const svalue *
359region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
360 tree type)
361{
362 setjmp_svalue::key_t key (r, type);
363 if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
364 return *slot;
9d804f9b 365 setjmp_svalue *setjmp_sval = new setjmp_svalue (r, alloc_symbol_id (), type);
808f4dfe
DM
366 RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
367 m_setjmp_values_map.put (key, setjmp_sval);
368 return setjmp_sval;
369}
370
371/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
372 necessary. */
373
const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
                                                     tree type)
{
  /* Consolidate on the (kind, type) pair.  */
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval
    = new poisoned_svalue (kind, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}
387
388/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
389 creating it if necessary. */
390
391const svalue *
392region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
393{
394 /* If this is a symbolic region from dereferencing a pointer, and the types
395 match, then return the original pointer. */
396 if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
397 if (ptr_type == sym_reg->get_pointer ()->get_type ())
398 return sym_reg->get_pointer ();
399
400 region_svalue::key_t key (ptr_type, pointee);
401 if (region_svalue **slot = m_pointer_values_map.get (key))
402 return *slot;
9d804f9b
DM
403 region_svalue *sval
404 = new region_svalue (alloc_symbol_id (), ptr_type, pointee);
808f4dfe
DM
405 RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
406 m_pointer_values_map.put (key, sval);
407 return sval;
408}
409
410/* Subroutine of region_model_manager::get_or_create_unaryop.
411 Attempt to fold the inputs and return a simpler svalue *.
412 Otherwise, return NULL. */
413
const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
                                          const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned": the poison kind propagates through the op.  */
  else if (const poisoned_svalue *poisoned_sval
             = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
                                          type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
        /* Handle redundant casts.  */
        if (arg->get_type ()
            && useless_type_conversion_p (arg->get_type (), type))
          return arg;

        /* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))
             => "cast<TYPE> (innermost_arg)",
           unless INNER_TYPE is narrower than TYPE.  */
        if (const svalue *innermost_arg = arg->maybe_undo_cast ())
          {
            tree inner_type = arg->get_type ();
            if (TYPE_SIZE (type)
                && TYPE_SIZE (inner_type)
                && (fold_binary (LE_EXPR, boolean_type_node,
                                 TYPE_SIZE (type), TYPE_SIZE (inner_type))
                    == boolean_true_node))
              return maybe_fold_unaryop (type, op, innermost_arg);
          }
        /* Avoid creating symbolic regions for pointer casts by
           simplifying (T*)(&REGION) to ((T*)&REGION).  */
        if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
          if (POINTER_TYPE_P (type)
              && region_sval->get_type ()
              && POINTER_TYPE_P (region_sval->get_type ()))
            return get_ptr_svalue (type, region_sval->get_pointee ());
      }
      break;
    case TRUTH_NOT_EXPR:
      {
        /* Invert comparisons e.g. "!(x == y)" => "x != y".
           invert_tree_comparison can fail (ERROR_MARK) e.g. for
           NaN-honoring float comparisons; fall through in that case.  */
        if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
          if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
            {
              enum tree_code inv_op
                = invert_tree_comparison (binop->get_op (),
                                          HONOR_NANS (binop->get_type ()));
              if (inv_op != ERROR_MARK)
                return get_or_create_binop (binop->get_type (), inv_op,
                                            binop->get_arg0 (),
                                            binop->get_arg1 ());
            }
      }
      break;
    case NEGATE_EXPR:
      {
        /* -(-(VAL)) is VAL, for integer types.  */
        if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
          if (unaryop->get_op () == NEGATE_EXPR
              && type == unaryop->get_type ()
              && type
              && INTEGRAL_TYPE_P (type))
            return unaryop->get_arg ();
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      {
        if (CONSTANT_CLASS_P (result))
          return get_or_create_constant_svalue (result);

        /* fold_unary can return casts of constants; try to handle them.  */
        if (op != NOP_EXPR
            && type
            && TREE_CODE (result) == NOP_EXPR
            && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
          {
            const svalue *inner_cst
              = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
            return get_or_create_cast (type,
                                       get_or_create_cast (TREE_TYPE (result),
                                                           inner_cst));
          }
      }

  /* No fold was possible.  */
  return NULL;
}
514
515/* Return the svalue * for an unary operation OP on ARG with a result of
516 type TYPE, creating it if necessary. */
517
518const svalue *
519region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
520 const svalue *arg)
521{
522 if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
523 return folded;
524 unaryop_svalue::key_t key (type, op, arg);
525 if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
526 return *slot;
9d804f9b
DM
527 unaryop_svalue *unaryop_sval
528 = new unaryop_svalue (alloc_symbol_id (), type, op, arg);
808f4dfe
DM
529 RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
530 m_unaryop_values_map.put (key, unaryop_sval);
531 return unaryop_sval;
532}
533
ecdb9322
DM
534/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
535 Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
536 of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
537 and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
538 on. */
539
540static enum tree_code
541get_code_for_cast (tree dst_type, tree src_type)
542{
543 gcc_assert (dst_type);
544 if (!src_type)
545 return NOP_EXPR;
546
778aca1b 547 if (SCALAR_FLOAT_TYPE_P (src_type))
ecdb9322
DM
548 {
549 if (TREE_CODE (dst_type) == INTEGER_TYPE)
550 return FIX_TRUNC_EXPR;
551 else
552 return VIEW_CONVERT_EXPR;
553 }
554
555 return NOP_EXPR;
556}
557
808f4dfe
DM
558/* Return the svalue * for a cast of ARG to type TYPE, creating it
559 if necessary. */
560
561const svalue *
562region_model_manager::get_or_create_cast (tree type, const svalue *arg)
563{
366bd1ac 564 gcc_assert (type);
45b999f6
DM
565
566 /* No-op if the types are the same. */
567 if (type == arg->get_type ())
568 return arg;
569
570 /* Don't attempt to handle casts involving vector types for now. */
778aca1b 571 if (VECTOR_TYPE_P (type)
45b999f6 572 || (arg->get_type ()
778aca1b 573 && VECTOR_TYPE_P (arg->get_type ())))
45b999f6
DM
574 return get_or_create_unknown_svalue (type);
575
ecdb9322
DM
576 enum tree_code op = get_code_for_cast (type, arg->get_type ());
577 return get_or_create_unaryop (type, op, arg);
808f4dfe
DM
578}
579
ec3fafa9
DM
580/* Subroutine of region_model_manager::maybe_fold_binop for handling
581 (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
582 optimize_bit_field_compare, where CST is from ARG1.
583
584 Support masking out bits from a compound_svalue for comparing a bitfield
585 against a value, as generated by optimize_bit_field_compare for
586 BITFIELD == VALUE.
587
588 If COMPOUND_SVAL has a value for the appropriate bits, return it,
589 shifted accordingly.
590 Otherwise return NULL. */
591
const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
                                       const compound_svalue *compound_sval,
                                       tree cst,
                                       const svalue *arg1)
{
  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return NULL;

  /* NOTE(review): on big-endian targets the bit numbering within the
     byte is mirrored relative to the mask, hence the
     BITS_PER_UNIT-based remapping — this assumes the bitfield fits in
     a single byte; confirm.  */
  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
                            bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return NULL;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
                                                    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
                              shifted_sval, arg1);
}
629
808f4dfe
DM
630/* Subroutine of region_model_manager::get_or_create_binop.
631 Attempt to fold the inputs and return a simpler svalue *.
632 Otherwise, return NULL. */
633
const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
                                        const svalue *arg0,
                                        const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (tree result = fold_binary (op, type, cst0, cst1))
        if (CONSTANT_CLASS_P (result))
          return get_or_create_constant_svalue (result);
    }

  /* Don't apply the algebraic identities below to floating-point types
     (they aren't valid under IEEE semantics, e.g. with NaNs).  */
  if ((type && FLOAT_TYPE_P (type))
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1))
        return get_or_create_cast (type, arg0);
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1))
        return get_or_create_cast (type, arg0);
      /* (0 - VAL) -> -VAL.  */
      if (cst0 && zerop (cst0))
        return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
      /* (X + Y) - X -> Y.  */
      if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
        if (binop->get_op () == PLUS_EXPR)
          if (binop->get_arg0 () == arg1)
            return get_or_create_cast (type, binop->get_arg1 ());
      break;
    case MULT_EXPR:
      /* (VAL * 0).  */
      if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
        return get_or_create_constant_svalue (build_int_cst (type, 0));
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
        /* TODO: we ought to have a cast to TYPE here, but doing so introduces
           regressions; see PR analyzer/110902.  */
        return arg0;
      break;
    case BIT_AND_EXPR:
      if (cst1)
        {
          if (zerop (cst1) && INTEGRAL_TYPE_P (type))
            /* "(ARG0 & 0)" -> "0".  */
            return get_or_create_constant_svalue (build_int_cst (type, 0));

          /* Try to undo optimize_bit_field_compare's
             "(COMPOUND & MASK)" pattern.  */
          if (const compound_svalue *compound_sval
                = arg0->dyn_cast_compound_svalue ())
            if (const svalue *sval
                  = maybe_undo_optimize_bit_field_compare (type,
                                                           compound_sval,
                                                           cst1, arg1))
              return sval;
        }
      if (arg0->get_type () == boolean_type_node
          && arg1->get_type () == boolean_type_node)
        {
          /* If the LHS are both _Bool, then... */
          /* ..."(1 & x) -> x".  */
          if (cst0 && !zerop (cst0))
            return get_or_create_cast (type, arg1);
          /* ..."(x & 1) -> x".  */
          if (cst1 && !zerop (cst1))
            return get_or_create_cast (type, arg0);
          /* ..."(0 & x) -> 0".  */
          if (cst0 && zerop (cst0))
            return get_or_create_int_cst (type, 0);
          /* ..."(x & 0) -> 0".  */
          if (cst1 && zerop (cst1))
            return get_or_create_int_cst (type, 0);
        }
      break;
    case BIT_IOR_EXPR:
      if (arg0->get_type () == boolean_type_node
          && arg1->get_type () == boolean_type_node)
        {
          /* If the LHS are both _Bool, then... */
          /* ..."(1 | x) -> 1".  */
          if (cst0 && !zerop (cst0))
            return get_or_create_int_cst (type, 1);
          /* ..."(x | 1) -> 1".  */
          if (cst1 && !zerop (cst1))
            return get_or_create_int_cst (type, 1);
          /* ..."(0 | x) -> x".  */
          if (cst0 && zerop (cst0))
            return get_or_create_cast (type, arg1);
          /* ..."(x | 0) -> x".  */
          if (cst1 && zerop (cst1))
            return get_or_create_cast (type, arg0);
        }
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
        {
          if (zerop (cst1) && INTEGRAL_TYPE_P (type))
            /* "(ARG0 && 0)" -> "0".  */
            return get_or_create_constant_svalue (build_int_cst (type, 0));
          else
            /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
            return get_or_create_cast (type, arg0);
        }
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
        {
          if (zerop (cst1))
            /* "(ARG0 || 0)" -> "ARG0".  */
            return get_or_create_cast (type, arg0);
          else
            /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
            return get_or_create_cast (type, arg1);
        }
      break;
    }

  /* For associative ops, fold "(X op CST_A) op CST_B)" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
          && binop->get_arg1 ()->maybe_get_constant ())
        return get_or_create_binop
          (type, op, binop->get_arg0 (),
           get_or_create_binop (type, op,
                                binop->get_arg1 (), arg1));

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "(PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
        if (binop->get_arg1 ()->maybe_get_constant ())
          return get_or_create_binop
            (type, op, binop->get_arg0 (),
             get_or_create_binop (size_type_node, op,
                                  binop->get_arg1 (), arg1));

  /* Distribute multiplication by a constant through addition/subtraction:
     (X + Y) * CST => (X * CST) + (Y * CST).  */
  if (cst1 && op == MULT_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == PLUS_EXPR
          || binop->get_op () == MINUS_EXPR)
        {
          return get_or_create_binop
            (type, binop->get_op (),
             get_or_create_binop (type, op,
                                  binop->get_arg0 (), arg1),
             get_or_create_binop (type, op,
                                  binop->get_arg1 (), arg1));
        }

  /* etc.  */

  /* No fold was possible.  */
  return NULL;
}
808
809/* Return the svalue * for an binary operation OP on ARG0 and ARG1
810 with a result of type TYPE, creating it if necessary. */
811
const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
                                           const svalue *arg0,
                                           const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  This canonical
     form improves consolidation and simplifies maybe_fold_binop.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     it via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* Otherwise, consolidate on the (type, op, arg0, arg1) tuple.  */
  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval
    = new binop_svalue (alloc_symbol_id (), type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}
839
840/* Subroutine of region_model_manager::get_or_create_sub_svalue.
841 Return a folded svalue, or NULL. */
842
const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
                                             const svalue *parent_svalue,
                                             const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
        = parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
          || unary->get_op () == VIEW_CONVERT_EXPR)
        if (tree cst = unary->get_arg ()->maybe_get_constant ())
          if (zerop (cst) && type)
            {
              const svalue *cst_sval
                = get_or_create_constant_svalue (cst);
              return get_or_create_cast (type, cst_sval);
            }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      {
        /* If we have a concrete 1-byte access within the parent region... */
        byte_range subregion_bytes (0, 0);
        if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
            && subregion_bytes.m_size_in_bytes == 1
            && type)
          {
            /* ...then attempt to get that char from the STRING_CST.  */
            HOST_WIDE_INT hwi_start_byte
              = subregion_bytes.m_start_byte_offset.to_shwi ();
            tree cst_idx
              = build_int_cst_type (size_type_node, hwi_start_byte);
            if (const svalue *char_sval
                  = maybe_get_char_from_string_cst (cst, cst_idx))
              return get_or_create_cast (type, char_sval);
          }
      }

  if (const initial_svalue *init_sval
        = parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
         i.e.
         Subvalue(InitialValue(R1), FieldRegion(R2, F))
         -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
        {
          const region *field_reg_new
            = get_field_region (init_sval->get_region (),
                                field_reg->get_field ());
          return get_or_create_initial_value (field_reg_new);
        }
      /* SUB(INIT(r)[ELEMENT] -> INIT(e[ELEMENT])
         i.e.
         Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
         -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
        {
          const region *element_reg_new
            = get_element_region (init_sval->get_region (),
                                  element_reg->get_type (),
                                  element_reg->get_index ());
          return get_or_create_initial_value (element_reg_new);
        }
    }

  /* Any part of a repeated value is a cast of the inner value
     (provided we have a type to cast to).  */
  if (const repeated_svalue *repeated_sval
        = parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  /* No fold was possible.  */
  return NULL;
}
923
924/* Return the svalue * for extracting a subvalue of type TYPE from
925 PARENT_SVALUE based on SUBREGION, creating it if necessary. */
926
927const svalue *
928region_model_manager::get_or_create_sub_svalue (tree type,
929 const svalue *parent_svalue,
930 const region *subregion)
931{
932 if (const svalue *folded
933 = maybe_fold_sub_svalue (type, parent_svalue, subregion))
934 return folded;
935
936 sub_svalue::key_t key (type, parent_svalue, subregion);
937 if (sub_svalue **slot = m_sub_values_map.get (key))
938 return *slot;
939 sub_svalue *sub_sval
9d804f9b 940 = new sub_svalue (alloc_symbol_id (), type, parent_svalue, subregion);
808f4dfe
DM
941 RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
942 m_sub_values_map.put (key, sub_sval);
943 return sub_sval;
944}
945
e61ffa20
DM
946/* Subroutine of region_model_manager::get_or_create_repeated_svalue.
947 Return a folded svalue, or NULL. */
948
949const svalue *
950region_model_manager::maybe_fold_repeated_svalue (tree type,
951 const svalue *outer_size,
952 const svalue *inner_svalue)
953{
a113b143
DM
954 /* Repeated "unknown"/"poisoned" is unknown. */
955 if (!outer_size->can_have_associated_state_p ()
956 || !inner_svalue->can_have_associated_state_p ())
957 return get_or_create_unknown_svalue (type);
958
e61ffa20
DM
959 /* If INNER_SVALUE is the same size as OUTER_SIZE,
960 turn into simply a cast. */
961 if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
962 {
963 HOST_WIDE_INT num_bytes_inner_svalue
964 = int_size_in_bytes (inner_svalue->get_type ());
965 if (num_bytes_inner_svalue != -1)
966 if (num_bytes_inner_svalue
967 == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
968 {
969 if (type)
970 return get_or_create_cast (type, inner_svalue);
971 else
972 return inner_svalue;
973 }
974 }
975
976 /* Handle zero-fill of a specific type. */
977 if (tree cst = inner_svalue->maybe_get_constant ())
978 if (zerop (cst) && type)
979 return get_or_create_cast (type, inner_svalue);
980
981 return NULL;
982}
983
984/* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
985 enough times to be of size OUTER_SIZE, creating it if necessary.
986 e.g. for filling buffers with a constant value. */
987
988const svalue *
989region_model_manager::get_or_create_repeated_svalue (tree type,
990 const svalue *outer_size,
991 const svalue *inner_svalue)
992{
993 if (const svalue *folded
994 = maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
995 return folded;
996
997 repeated_svalue::key_t key (type, outer_size, inner_svalue);
998 if (repeated_svalue **slot = m_repeated_values_map.get (key))
999 return *slot;
1000 repeated_svalue *repeated_sval
9d804f9b 1001 = new repeated_svalue (alloc_symbol_id (), type, outer_size, inner_svalue);
e61ffa20
DM
1002 RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
1003 m_repeated_values_map.put (key, repeated_sval);
1004 return repeated_sval;
1005}
1006
1007/* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
1008 Return true and write the result to OUT if successful.
1009 Return false otherwise. */
1010
1011static bool
1012get_bit_range_for_field (tree field, bit_range *out)
1013{
1014 bit_size_t bit_size;
1015 if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
1016 return false;
1017 int field_bit_offset = int_bit_position (field);
1018 *out = bit_range (field_bit_offset, bit_size);
1019 return true;
1020}
1021
1022/* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
1023 Return true and write the result to OUT if successful.
1024 Return false otherwise. */
1025
1026static bool
1027get_byte_range_for_field (tree field, byte_range *out)
1028{
1029 bit_range field_bits (0, 0);
1030 if (!get_bit_range_for_field (field, &field_bits))
1031 return false;
1032 return field_bits.as_byte_range (out);
1033}
1034
1035/* Attempt to determine if there is a specific field within RECORD_TYPE
1036 at BYTES. If so, return it, and write the location of BYTES relative
1037 to the field to *OUT_RANGE_WITHIN_FIELD.
1038 Otherwise, return NULL_TREE.
1039 For example, given:
1040 struct foo { uint32 a; uint32; b};
1041 and
1042 bytes = {bytes 6-7} (of foo)
1043 we have bytes 3-4 of field b. */
1044
1045static tree
1046get_field_at_byte_range (tree record_type, const byte_range &bytes,
1047 byte_range *out_range_within_field)
1048{
1049 bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;
1050
1051 tree field = get_field_at_bit_offset (record_type, bit_offset);
1052 if (!field)
1053 return NULL_TREE;
1054
1055 byte_range field_bytes (0,0);
1056 if (!get_byte_range_for_field (field, &field_bytes))
1057 return NULL_TREE;
1058
1059 /* Is BYTES fully within field_bytes? */
1060 byte_range bytes_within_field (0,0);
1061 if (!field_bytes.contains_p (bytes, &bytes_within_field))
1062 return NULL_TREE;
1063
1064 *out_range_within_field = bytes_within_field;
1065 return field;
1066}
1067
1068/* Subroutine of region_model_manager::get_or_create_bits_within.
1069 Return a folded svalue, or NULL. */
1070
1071const svalue *
1072region_model_manager::maybe_fold_bits_within_svalue (tree type,
1073 const bit_range &bits,
1074 const svalue *inner_svalue)
1075{
1076 tree inner_type = inner_svalue->get_type ();
1077 /* Fold:
1078 BITS_WITHIN ((0, sizeof (VAL), VAL))
1079 to:
1080 CAST(TYPE, VAL). */
1081 if (bits.m_start_bit_offset == 0 && inner_type)
1082 {
1083 bit_size_t inner_type_size;
1084 if (int_size_in_bits (inner_type, &inner_type_size))
1085 if (inner_type_size == bits.m_size_in_bits)
1086 {
1087 if (type)
1088 return get_or_create_cast (type, inner_svalue);
1089 else
1090 return inner_svalue;
1091 }
1092 }
1093
1094 /* Kind-specific folding. */
1095 if (const svalue *sval
1096 = inner_svalue->maybe_fold_bits_within (type, bits, this))
1097 return sval;
1098
1099 byte_range bytes (0,0);
1100 if (bits.as_byte_range (&bytes) && inner_type)
1101 switch (TREE_CODE (inner_type))
1102 {
1103 default:
1104 break;
1105 case ARRAY_TYPE:
1106 {
1107 /* Fold:
1108 BITS_WITHIN (range, KIND(REG))
1109 to:
1110 BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
1111 if range1 is a byte-range fully within one ELEMENT. */
1112 tree element_type = TREE_TYPE (inner_type);
1113 HOST_WIDE_INT element_byte_size
1114 = int_size_in_bytes (element_type);
1115 if (element_byte_size > 0)
1116 {
1117 HOST_WIDE_INT start_idx
1118 = (bytes.get_start_byte_offset ().to_shwi ()
1119 / element_byte_size);
1120 HOST_WIDE_INT last_idx
1121 = (bytes.get_last_byte_offset ().to_shwi ()
1122 / element_byte_size);
1123 if (start_idx == last_idx)
1124 {
1125 if (const initial_svalue *initial_sval
1126 = inner_svalue->dyn_cast_initial_svalue ())
1127 {
1128 bit_offset_t start_of_element
1129 = start_idx * element_byte_size * BITS_PER_UNIT;
1130 bit_range bits_within_element
1131 (bits.m_start_bit_offset - start_of_element,
1132 bits.m_size_in_bits);
1133 const svalue *idx_sval
1134 = get_or_create_int_cst (integer_type_node, start_idx);
1135 const region *element_reg =
1136 get_element_region (initial_sval->get_region (),
1137 element_type, idx_sval);
1138 const svalue *element_reg_sval
1139 = get_or_create_initial_value (element_reg);
1140 return get_or_create_bits_within (type,
1141 bits_within_element,
1142 element_reg_sval);
1143 }
1144 }
1145 }
1146 }
1147 break;
1148 case RECORD_TYPE:
1149 {
1150 /* Fold:
1151 BYTES_WITHIN (range, KIND(REG))
1152 to:
1153 BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
1154 if range1 is fully within FIELD. */
1155 byte_range bytes_within_field (0, 0);
1156 if (tree field = get_field_at_byte_range (inner_type, bytes,
1157 &bytes_within_field))
1158 {
1159 if (const initial_svalue *initial_sval
1160 = inner_svalue->dyn_cast_initial_svalue ())
1161 {
1162 const region *field_reg =
1163 get_field_region (initial_sval->get_region (), field);
1164 const svalue *initial_reg_sval
1165 = get_or_create_initial_value (field_reg);
1166 return get_or_create_bits_within
1167 (type,
1168 bytes_within_field.as_bit_range (),
1169 initial_reg_sval);
1170 }
1171 }
1172 }
1173 break;
1174 }
1175 return NULL;
1176}
1177
1178/* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
1179 creating it if necessary. */
1180
1181const svalue *
1182region_model_manager::get_or_create_bits_within (tree type,
1183 const bit_range &bits,
1184 const svalue *inner_svalue)
1185{
1186 if (const svalue *folded
1187 = maybe_fold_bits_within_svalue (type, bits, inner_svalue))
1188 return folded;
1189
1190 bits_within_svalue::key_t key (type, bits, inner_svalue);
1191 if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
1192 return *slot;
1193 bits_within_svalue *bits_within_sval
9d804f9b 1194 = new bits_within_svalue (alloc_symbol_id (), type, bits, inner_svalue);
e61ffa20
DM
1195 RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
1196 m_bits_within_values_map.put (key, bits_within_sval);
1197 return bits_within_sval;
1198}
1199
808f4dfe
DM
1200/* Return the svalue * that decorates ARG as being unmergeable,
1201 creating it if necessary. */
1202
1203const svalue *
1204region_model_manager::get_or_create_unmergeable (const svalue *arg)
1205{
1206 if (arg->get_kind () == SK_UNMERGEABLE)
1207 return arg;
1208
1209 if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
1210 return *slot;
9d804f9b
DM
1211 unmergeable_svalue *unmergeable_sval
1212 = new unmergeable_svalue (alloc_symbol_id (), arg);
808f4dfe
DM
1213 RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
1214 m_unmergeable_values_map.put (arg, unmergeable_sval);
1215 return unmergeable_sval;
1216}
1217
1218/* Return the svalue * of type TYPE for the merger of value BASE_SVAL
1219 and ITER_SVAL at POINT, creating it if necessary. */
1220
1221const svalue *
e6fe02d8
DM
1222region_model_manager::
1223get_or_create_widening_svalue (tree type,
1224 const function_point &point,
1225 const svalue *base_sval,
1226 const svalue *iter_sval)
808f4dfe 1227{
2fc20138
DM
1228 gcc_assert (base_sval->get_kind () != SK_WIDENING);
1229 gcc_assert (iter_sval->get_kind () != SK_WIDENING);
808f4dfe
DM
1230 widening_svalue::key_t key (type, point, base_sval, iter_sval);
1231 if (widening_svalue **slot = m_widening_values_map.get (key))
1232 return *slot;
1233 widening_svalue *widening_sval
9d804f9b
DM
1234 = new widening_svalue (alloc_symbol_id (), type, point, base_sval,
1235 iter_sval);
808f4dfe
DM
1236 RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
1237 m_widening_values_map.put (key, widening_sval);
1238 return widening_sval;
1239}
1240
1241/* Return the svalue * of type TYPE for the compound values in MAP,
1242 creating it if necessary. */
1243
1244const svalue *
1245region_model_manager::get_or_create_compound_svalue (tree type,
1246 const binding_map &map)
1247{
1248 compound_svalue::key_t tmp_key (type, &map);
1249 if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
1250 return *slot;
1251 compound_svalue *compound_sval
9d804f9b 1252 = new compound_svalue (alloc_symbol_id (), type, map);
808f4dfe
DM
1253 RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
1254 /* Use make_key rather than reusing the key, so that we use a
1255 ptr to compound_sval's binding_map, rather than the MAP param. */
1256 m_compound_values_map.put (compound_sval->make_key (), compound_sval);
1257 return compound_sval;
1258}
1259
3734527d
DM
1260/* class conjured_purge. */
1261
1262/* Purge state relating to SVAL. */
1263
1264void
1265conjured_purge::purge (const conjured_svalue *sval) const
1266{
1267 m_model->purge_state_involving (sval, m_ctxt);
1268}
1269
808f4dfe 1270/* Return the svalue * of type TYPE for the value conjured for ID_REG
f65f63c4
DM
1271 at STMT (using IDX for any further disambiguation),
1272 creating it if necessary.
3734527d
DM
1273 Use P to purge existing state from the svalue, for the case where a
1274 conjured_svalue would be reused along an execution path. */
808f4dfe
DM
1275
1276const svalue *
1277region_model_manager::get_or_create_conjured_svalue (tree type,
1278 const gimple *stmt,
3734527d 1279 const region *id_reg,
f65f63c4
DM
1280 const conjured_purge &p,
1281 unsigned idx)
808f4dfe 1282{
f65f63c4 1283 conjured_svalue::key_t key (type, stmt, id_reg, idx);
808f4dfe 1284 if (conjured_svalue **slot = m_conjured_values_map.get (key))
3734527d
DM
1285 {
1286 const conjured_svalue *sval = *slot;
1287 /* We're reusing an existing conjured_svalue, perhaps from a different
1288 state within this analysis, or perhaps from an earlier state on this
1289 execution path. For the latter, purge any state involving the "new"
1290 svalue from the current program_state. */
1291 p.purge (sval);
1292 return sval;
1293 }
808f4dfe 1294 conjured_svalue *conjured_sval
f65f63c4 1295 = new conjured_svalue (alloc_symbol_id (), type, stmt, id_reg, idx);
808f4dfe
DM
1296 RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
1297 m_conjured_values_map.put (key, conjured_sval);
1298 return conjured_sval;
1299}
1300
ded2c2c0
DM
1301/* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
1302 Return a folded svalue, or NULL. */
1303
1304const svalue *
1305region_model_manager::
1306maybe_fold_asm_output_svalue (tree type,
1307 const vec<const svalue *> &inputs)
1308{
1309 /* Unknown inputs should lead to unknown results. */
1310 for (const auto &iter : inputs)
1311 if (iter->get_kind () == SK_UNKNOWN)
1312 return get_or_create_unknown_svalue (type);
1313
1314 return NULL;
1315}
1316
1317/* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
1318 asm stmt ASM_STMT, given INPUTS as inputs. */
1319
1320const svalue *
1321region_model_manager::
1322get_or_create_asm_output_svalue (tree type,
1323 const gasm *asm_stmt,
1324 unsigned output_idx,
1325 const vec<const svalue *> &inputs)
1326{
1327 gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);
1328
1329 if (const svalue *folded
1330 = maybe_fold_asm_output_svalue (type, inputs))
1331 return folded;
1332
1333 const char *asm_string = gimple_asm_string (asm_stmt);
1334 const unsigned noutputs = gimple_asm_noutputs (asm_stmt);
1335
1336 asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
1337 if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
1338 return *slot;
1339 asm_output_svalue *asm_output_sval
9d804f9b
DM
1340 = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
1341 noutputs, inputs);
ded2c2c0
DM
1342 RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
1343 m_asm_output_values_map.put (key, asm_output_sval);
1344 return asm_output_sval;
1345}
1346
bfca9505
DM
1347/* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
1348 asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
1349 INPUTS as inputs. */
1350
1351const svalue *
1352region_model_manager::
1353get_or_create_asm_output_svalue (tree type,
1354 const char *asm_string,
1355 unsigned output_idx,
1356 unsigned num_outputs,
1357 const vec<const svalue *> &inputs)
1358{
1359 gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);
1360
1361 if (const svalue *folded
1362 = maybe_fold_asm_output_svalue (type, inputs))
1363 return folded;
1364
1365 asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
1366 if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
1367 return *slot;
1368 asm_output_svalue *asm_output_sval
9d804f9b
DM
1369 = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
1370 num_outputs, inputs);
bfca9505
DM
1371 RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
1372 m_asm_output_values_map.put (key, asm_output_sval);
1373 return asm_output_sval;
1374}
aee1adf2
DM
1375
1376/* Return the svalue * of type TYPE for the result of a call to FNDECL
1377 with __attribute__((const)), given INPUTS as inputs. */
1378
1379const svalue *
1380region_model_manager::
1381get_or_create_const_fn_result_svalue (tree type,
1382 tree fndecl,
1383 const vec<const svalue *> &inputs)
1384{
1385 gcc_assert (type);
1386 gcc_assert (fndecl);
1387 gcc_assert (DECL_P (fndecl));
1388 gcc_assert (TREE_READONLY (fndecl));
1389 gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);
1390
1391 const_fn_result_svalue::key_t key (type, fndecl, inputs);
1392 if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
1393 return *slot;
1394 const_fn_result_svalue *const_fn_result_sval
9d804f9b 1395 = new const_fn_result_svalue (alloc_symbol_id (), type, fndecl, inputs);
aee1adf2
DM
1396 RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
1397 m_const_fn_result_values_map.put (key, const_fn_result_sval);
1398 return const_fn_result_sval;
1399}
1400
808f4dfe
DM
1401/* Given STRING_CST, a STRING_CST and BYTE_OFFSET_CST a constant,
1402 attempt to get the character at that offset, returning either
1403 the svalue for the character constant, or NULL if unsuccessful. */
1404
1405const svalue *
1406region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
1407 tree byte_offset_cst)
1408{
1409 gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1410
1411 /* Adapted from fold_read_from_constant_string. */
1412 scalar_int_mode char_mode;
1413 if (TREE_CODE (byte_offset_cst) == INTEGER_CST
1414 && compare_tree_int (byte_offset_cst,
1415 TREE_STRING_LENGTH (string_cst)) < 0
1416 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
1417 &char_mode)
1418 && GET_MODE_SIZE (char_mode) == 1)
1419 {
1420 tree char_cst
1421 = build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
1422 (TREE_STRING_POINTER (string_cst)
1423 [TREE_INT_CST_LOW (byte_offset_cst)]));
1424 return get_or_create_constant_svalue (char_cst);
1425 }
1426 return NULL;
1427}
1428
1429/* region consolidation. */
1430
1431/* Return the region for FNDECL, creating it if necessary. */
1432
1433const function_region *
1434region_model_manager::get_region_for_fndecl (tree fndecl)
1435{
1436 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
1437
1438 function_region **slot = m_fndecls_map.get (fndecl);
1439 if (slot)
1440 return *slot;
1441 function_region *reg
9d804f9b 1442 = new function_region (alloc_symbol_id (), &m_code_region, fndecl);
808f4dfe
DM
1443 m_fndecls_map.put (fndecl, reg);
1444 return reg;
1445}
1446
1447/* Return the region for LABEL, creating it if necessary. */
1448
1449const label_region *
1450region_model_manager::get_region_for_label (tree label)
1451{
1452 gcc_assert (TREE_CODE (label) == LABEL_DECL);
1453
1454 label_region **slot = m_labels_map.get (label);
1455 if (slot)
1456 return *slot;
1457
1458 tree fndecl = DECL_CONTEXT (label);
1459 gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);
1460
1461 const function_region *func_reg = get_region_for_fndecl (fndecl);
1462 label_region *reg
9d804f9b 1463 = new label_region (alloc_symbol_id (), func_reg, label);
808f4dfe
DM
1464 m_labels_map.put (label, reg);
1465 return reg;
1466}
1467
1468/* Return the region for EXPR, creating it if necessary. */
1469
1470const decl_region *
1471region_model_manager::get_region_for_global (tree expr)
1472{
778aca1b 1473 gcc_assert (VAR_P (expr));
808f4dfe
DM
1474
1475 decl_region **slot = m_globals_map.get (expr);
1476 if (slot)
1477 return *slot;
1478 decl_region *reg
9d804f9b 1479 = new decl_region (alloc_symbol_id (), &m_globals_region, expr);
808f4dfe
DM
1480 m_globals_map.put (expr, reg);
1481 return reg;
1482}
1483
3d41408c
DM
1484/* Return the region for an unknown access of type REGION_TYPE,
1485 creating it if necessary.
1486 This is a symbolic_region, where the pointer is an unknown_svalue
1487 of type &REGION_TYPE. */
1488
1489const region *
1490region_model_manager::get_unknown_symbolic_region (tree region_type)
1491{
1492 tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
1493 const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
1494 return get_symbolic_region (unknown_ptr);
1495}
1496
808f4dfe
DM
1497/* Return the region that describes accessing field FIELD of PARENT,
1498 creating it if necessary. */
1499
1500const region *
1501region_model_manager::get_field_region (const region *parent, tree field)
1502{
00cb0f58
DM
1503 gcc_assert (TREE_CODE (field) == FIELD_DECL);
1504
11d4ec5d
DM
1505 /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE). */
1506 if (parent->symbolic_for_unknown_ptr_p ())
3d41408c 1507 return get_unknown_symbolic_region (TREE_TYPE (field));
11d4ec5d 1508
808f4dfe
DM
1509 field_region::key_t key (parent, field);
1510 if (field_region *reg = m_field_regions.get (key))
1511 return reg;
1512
1513 field_region *field_reg
9d804f9b 1514 = new field_region (alloc_symbol_id (), parent, field);
808f4dfe
DM
1515 m_field_regions.put (key, field_reg);
1516 return field_reg;
1517}
1518
1519/* Return the region that describes accessing the element of type
1520 ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary. */
1521
1522const region *
1523region_model_manager::get_element_region (const region *parent,
1524 tree element_type,
1525 const svalue *index)
1526{
3d41408c
DM
1527 /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR). */
1528 if (parent->symbolic_for_unknown_ptr_p ())
1529 return get_unknown_symbolic_region (element_type);
1530
808f4dfe
DM
1531 element_region::key_t key (parent, element_type, index);
1532 if (element_region *reg = m_element_regions.get (key))
1533 return reg;
1534
1535 element_region *element_reg
9d804f9b 1536 = new element_region (alloc_symbol_id (), parent, element_type, index);
808f4dfe
DM
1537 m_element_regions.put (key, element_reg);
1538 return element_reg;
1539}
1540
1541/* Return the region that describes accessing the subregion of type
1542 ELEMENT_TYPE at offset BYTE_OFFSET within PARENT, creating it if
1543 necessary. */
1544
1545const region *
1546region_model_manager::get_offset_region (const region *parent,
1547 tree type,
1548 const svalue *byte_offset)
1549{
3d41408c
DM
1550 /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR). */
1551 if (parent->symbolic_for_unknown_ptr_p ())
1552 return get_unknown_symbolic_region (type);
1553
808f4dfe
DM
1554 /* If BYTE_OFFSET is zero, return PARENT. */
1555 if (tree cst_offset = byte_offset->maybe_get_constant ())
1556 if (zerop (cst_offset))
1557 return get_cast_region (parent, type);
1558
1559 /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
1560 to OFFSET_REGION(REG, (X + Y)). */
1561 if (const offset_region *parent_offset_reg
1562 = parent->dyn_cast_offset_region ())
1563 {
1564 const svalue *sval_x = parent_offset_reg->get_byte_offset ();
1565 const svalue *sval_sum
1566 = get_or_create_binop (byte_offset->get_type (),
1567 PLUS_EXPR, sval_x, byte_offset);
1568 return get_offset_region (parent->get_parent_region (), type, sval_sum);
1569 }
1570
1571 offset_region::key_t key (parent, type, byte_offset);
1572 if (offset_region *reg = m_offset_regions.get (key))
1573 return reg;
1574
1575 offset_region *offset_reg
9d804f9b 1576 = new offset_region (alloc_symbol_id (), parent, type, byte_offset);
808f4dfe
DM
1577 m_offset_regions.put (key, offset_reg);
1578 return offset_reg;
1579}
1580
e61ffa20
DM
1581/* Return the region that describes accessing the subregion of type
1582 TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary. */
1583
1584const region *
1585region_model_manager::get_sized_region (const region *parent,
1586 tree type,
1587 const svalue *byte_size_sval)
1588{
3d41408c
DM
1589 if (parent->symbolic_for_unknown_ptr_p ())
1590 return get_unknown_symbolic_region (type);
1591
e61ffa20
DM
1592 if (byte_size_sval->get_type () != size_type_node)
1593 byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);
1594
1595 /* If PARENT is already that size, return it. */
1596 const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
1597 if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
1598 if (tree size_cst = byte_size_sval->maybe_get_constant ())
1599 {
1600 tree comparison
1601 = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
1602 if (comparison == boolean_true_node)
1603 return parent;
1604 }
1605
1606 sized_region::key_t key (parent, type, byte_size_sval);
1607 if (sized_region *reg = m_sized_regions.get (key))
1608 return reg;
1609
1610 sized_region *sized_reg
9d804f9b 1611 = new sized_region (alloc_symbol_id (), parent, type, byte_size_sval);
e61ffa20
DM
1612 m_sized_regions.put (key, sized_reg);
1613 return sized_reg;
1614}
1615
808f4dfe
DM
1616/* Return the region that describes accessing PARENT_REGION as if
1617 it were of type TYPE, creating it if necessary. */
1618
1619const region *
1620region_model_manager::get_cast_region (const region *original_region,
1621 tree type)
1622{
1623 /* If types match, return ORIGINAL_REGION. */
1624 if (type == original_region->get_type ())
1625 return original_region;
1626
3d41408c
DM
1627 if (original_region->symbolic_for_unknown_ptr_p ())
1628 return get_unknown_symbolic_region (type);
1629
808f4dfe
DM
1630 cast_region::key_t key (original_region, type);
1631 if (cast_region *reg = m_cast_regions.get (key))
1632 return reg;
1633
1634 cast_region *cast_reg
9d804f9b 1635 = new cast_region (alloc_symbol_id (), original_region, type);
808f4dfe
DM
1636 m_cast_regions.put (key, cast_reg);
1637 return cast_reg;
1638}
1639
1640/* Return the frame_region for call to FUN from CALLING_FRAME, creating it
1641 if necessary. CALLING_FRAME may be NULL. */
1642
1643const frame_region *
1644region_model_manager::get_frame_region (const frame_region *calling_frame,
1645 function *fun)
1646{
1647 int index = calling_frame ? calling_frame->get_index () + 1 : 0;
1648
1649 frame_region::key_t key (calling_frame, fun);
1650 if (frame_region *reg = m_frame_regions.get (key))
1651 return reg;
1652
1653 frame_region *frame_reg
9d804f9b 1654 = new frame_region (alloc_symbol_id (), &m_stack_region, calling_frame,
808f4dfe
DM
1655 fun, index);
1656 m_frame_regions.put (key, frame_reg);
1657 return frame_reg;
1658}
1659
1660/* Return the region that describes dereferencing SVAL, creating it
1661 if necessary. */
1662
1663const region *
1664region_model_manager::get_symbolic_region (const svalue *sval)
1665{
1666 symbolic_region::key_t key (&m_root_region, sval);
1667 if (symbolic_region *reg = m_symbolic_regions.get (key))
1668 return reg;
1669
1670 symbolic_region *symbolic_reg
9d804f9b 1671 = new symbolic_region (alloc_symbol_id (), &m_root_region, sval);
808f4dfe
DM
1672 m_symbolic_regions.put (key, symbolic_reg);
1673 return symbolic_reg;
1674}
1675
1676/* Return the region that describes accessing STRING_CST, creating it
1677 if necessary. */
1678
1679const string_region *
1680region_model_manager::get_region_for_string (tree string_cst)
1681{
1682 gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1683
1684 string_region **slot = m_string_map.get (string_cst);
1685 if (slot)
1686 return *slot;
1687 string_region *reg
9d804f9b 1688 = new string_region (alloc_symbol_id (), &m_root_region, string_cst);
808f4dfe
DM
1689 m_string_map.put (string_cst, reg);
1690 return reg;
1691}
1692
93e759fc
DM
1693/* Return the region that describes accessing BITS within PARENT as TYPE,
1694 creating it if necessary. */
1695
1696const region *
1697region_model_manager::get_bit_range (const region *parent, tree type,
1698 const bit_range &bits)
1699{
1700 gcc_assert (parent);
1701
3d41408c
DM
1702 if (parent->symbolic_for_unknown_ptr_p ())
1703 return get_unknown_symbolic_region (type);
1704
93e759fc
DM
1705 bit_range_region::key_t key (parent, type, bits);
1706 if (bit_range_region *reg = m_bit_range_regions.get (key))
1707 return reg;
1708
1709 bit_range_region *bit_range_reg
9d804f9b 1710 = new bit_range_region (alloc_symbol_id (), parent, type, bits);
93e759fc
DM
1711 m_bit_range_regions.put (key, bit_range_reg);
1712 return bit_range_reg;
1713}
1714
2402dc6b
DM
1715/* Return the region that describes accessing the IDX-th variadic argument
1716 within PARENT_FRAME, creating it if necessary. */
1717
1718const var_arg_region *
1719region_model_manager::get_var_arg_region (const frame_region *parent_frame,
1720 unsigned idx)
1721{
1722 gcc_assert (parent_frame);
1723
1724 var_arg_region::key_t key (parent_frame, idx);
1725 if (var_arg_region *reg = m_var_arg_regions.get (key))
1726 return reg;
1727
1728 var_arg_region *var_arg_reg
9d804f9b 1729 = new var_arg_region (alloc_symbol_id (), parent_frame, idx);
2402dc6b
DM
1730 m_var_arg_regions.put (key, var_arg_reg);
1731 return var_arg_reg;
1732}
1733
808f4dfe
DM
1734/* If we see a tree code we don't know how to handle, rather than
1735 ICE or generate bogus results, create a dummy region, and notify
1736 CTXT so that it can mark the new state as being not properly
1737 modelled. The exploded graph can then stop exploring that path,
1738 since any diagnostics we might issue will have questionable
1739 validity. */
1740
1741const region *
1742region_model_manager::
1743get_region_for_unexpected_tree_code (region_model_context *ctxt,
1744 tree t,
1745 const dump_location_t &loc)
1746{
808f4dfe
DM
1747 tree type = TYPE_P (t) ? t : TREE_TYPE (t);
1748 region *new_reg
9d804f9b 1749 = new unknown_region (alloc_symbol_id (), &m_root_region, type);
b00a8304
DM
1750 if (ctxt)
1751 ctxt->on_unexpected_tree_code (t, loc);
808f4dfe
DM
1752 return new_reg;
1753}
1754
ce917b04
DM
1755/* Return a region describing a heap-allocated block of memory.
1756 Reuse an existing heap_allocated_region is its id is not within
1757 BASE_REGS_IN_USE. */
808f4dfe
DM
1758
1759const region *
ce917b04 1760region_model_manager::
7dc0ecaf 1761get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use)
808f4dfe 1762{
ce917b04
DM
1763 /* Try to reuse an existing region, if it's unreferenced in the
1764 client state. */
1765 for (auto existing_reg : m_managed_dynamic_regions)
1766 if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
1767 if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
1768 return existing_reg;
1769
1770 /* All existing ones (if any) are in use; create a new one. */
808f4dfe 1771 region *reg
9d804f9b 1772 = new heap_allocated_region (alloc_symbol_id (), &m_heap_region);
808f4dfe
DM
1773 m_managed_dynamic_regions.safe_push (reg);
1774 return reg;
1775}
1776
1777/* Return a new region describing a block of memory allocated within FRAME. */
1778
1779const region *
1780region_model_manager::create_region_for_alloca (const frame_region *frame)
1781{
1782 gcc_assert (frame);
9d804f9b 1783 region *reg = new alloca_region (alloc_symbol_id (), frame);
808f4dfe
DM
1784 m_managed_dynamic_regions.safe_push (reg);
1785 return reg;
1786}
1787
1788/* Log OBJ to LOGGER. */
1789
1790template <typename T>
1791static void
1792log_managed_object (logger *logger, const T *obj)
1793{
1794 logger->start_log_line ();
1795 pretty_printer *pp = logger->get_printer ();
1796 pp_string (pp, " ");
1797 obj->dump_to_pp (pp, true);
1798 logger->end_log_line ();
1799}
1800
1801/* Specialization for frame_region, which also logs the count of locals
1802 managed by the frame_region. */
1803
1804template <>
1805void
1806log_managed_object (logger *logger, const frame_region *obj)
1807{
1808 logger->start_log_line ();
1809 pretty_printer *pp = logger->get_printer ();
1810 pp_string (pp, " ");
1811 obj->dump_to_pp (pp, true);
1812 pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
1813 logger->end_log_line ();
1814}
1815
1816/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
1817 If SHOW_OBJS is true, also dump the objects themselves. */
1818
1819template <typename K, typename T>
1820static void
1821log_uniq_map (logger *logger, bool show_objs, const char *title,
1822 const hash_map<K, T*> &uniq_map)
1823{
3989337e 1824 logger->log (" # %s: %li", title, (long)uniq_map.elements ());
b0702ac5
DM
1825 if (!show_objs)
1826 return;
1827 auto_vec<const T *> vec_objs (uniq_map.elements ());
1828 for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
1829 iter != uniq_map.end (); ++iter)
1830 vec_objs.quick_push ((*iter).second);
1831
1832 vec_objs.qsort (T::cmp_ptr_ptr);
1833
1834 unsigned i;
1835 const T *obj;
1836 FOR_EACH_VEC_ELT (vec_objs, i, obj)
1837 log_managed_object<T> (logger, obj);
808f4dfe
DM
1838}
1839
1840/* Dump the number of objects that were managed by MAP to LOGGER.
1841 If SHOW_OBJS is true, also dump the objects themselves. */
1842
1843template <typename T>
1844static void
1845log_uniq_map (logger *logger, bool show_objs, const char *title,
1846 const consolidation_map<T> &map)
1847{
3989337e 1848 logger->log (" # %s: %li", title, (long)map.elements ());
b0702ac5
DM
1849 if (!show_objs)
1850 return;
1851
1852 auto_vec<const T *> vec_objs (map.elements ());
1853 for (typename consolidation_map<T>::iterator iter = map.begin ();
1854 iter != map.end (); ++iter)
1855 vec_objs.quick_push ((*iter).second);
1856
1857 vec_objs.qsort (T::cmp_ptr_ptr);
1858
1859 unsigned i;
1860 const T *obj;
1861 FOR_EACH_VEC_ELT (vec_objs, i, obj)
1862 log_managed_object<T> (logger, obj);
808f4dfe
DM
1863}
1864
/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("call string consolidation");
  m_empty_call_string.recursive_log (logger);
  logger->log ("next symbol id: %i", m_next_symbol_id);

  /* One line (or group of lines, if SHOW_OBJS) per svalue subclass.  */
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  /* The typeless unknown svalue is a singleton, not stored in a map.  */
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  /* High-water marks for the complexity limit on accepted svalues.  */
  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  /* One line (or group of lines, if SHOW_OBJS) per region subclass.  */
  logger->log ("region consolidation");
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
  logger->log (" # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());

  /* Delegate to the sub-managers for their own stats.  */
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}
1925
/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  /* The two kinds of binding keys consolidated by this manager.  */
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}
1940
5f6197d7
DM
1941/* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
1942 (using -fdump-analyzer-untracked). */
1943
1944static void
1945dump_untracked_region (const decl_region *decl_reg)
1946{
1947 tree decl = decl_reg->get_decl ();
1948 if (TREE_CODE (decl) != VAR_DECL)
1949 return;
c788a0ea
DM
1950 /* For now, don't emit the status of decls in the constant pool, to avoid
1951 differences in DejaGnu test results between targets that use these vs
1952 those that don't.
1953 (Eventually these decls should probably be untracked and we should test
1954 for that, but that's not stage 4 material). */
1955 if (DECL_IN_CONSTANT_POOL (decl))
1956 return;
5f6197d7
DM
1957 warning_at (DECL_SOURCE_LOCATION (decl), 0,
1958 "track %qD: %s",
1959 decl, (decl_reg->tracked_p () ? "yes" : "no"));
1960}
1961
1962/* Implementation of -fdump-analyzer-untracked. */
1963
1964void
1965region_model_manager::dump_untracked_regions () const
1966{
1967 for (auto iter : m_globals_map)
1968 {
1969 const decl_region *decl_reg = iter.second;
1970 dump_untracked_region (decl_reg);
1971 }
1972 for (auto frame_iter : m_frame_regions)
1973 {
1974 const frame_region *frame_reg = frame_iter.second;
1975 frame_reg->dump_untracked_regions ();
1976 }
1977}
1978
1979void
1980frame_region::dump_untracked_regions () const
1981{
1982 for (auto iter : m_locals)
1983 {
1984 const decl_region *decl_reg = iter.second;
1985 dump_untracked_region (decl_reg);
1986 }
1987}
1988
808f4dfe
DM
1989} // namespace ana
1990
1991#endif /* #if ENABLE_ANALYZER */