/* Consolidation of svalues and regions.
   Copyright (C) 2020-2023 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21#include "config.h"
6341f14e 22#define INCLUDE_MEMORY
808f4dfe
DM
23#include "system.h"
24#include "coretypes.h"
25#include "tree.h"
26#include "diagnostic-core.h"
27#include "gimple-pretty-print.h"
28#include "function.h"
29#include "basic-block.h"
30#include "gimple.h"
31#include "gimple-iterator.h"
32#include "diagnostic-core.h"
33#include "graphviz.h"
34#include "options.h"
35#include "cgraph.h"
36#include "tree-dfa.h"
37#include "stringpool.h"
38#include "convert.h"
39#include "target.h"
40#include "fold-const.h"
41#include "tree-pretty-print.h"
808f4dfe 42#include "bitmap.h"
808f4dfe
DM
43#include "analyzer/analyzer.h"
44#include "analyzer/analyzer-logging.h"
45#include "ordered-hash-map.h"
46#include "options.h"
808f4dfe
DM
47#include "analyzer/supergraph.h"
48#include "sbitmap.h"
49#include "analyzer/call-string.h"
50#include "analyzer/program-point.h"
51#include "analyzer/store.h"
52#include "analyzer/region-model.h"
8ca7fa84 53#include "analyzer/constraint-manager.h"
808f4dfe
DM
54
55#if ENABLE_ANALYZER
56
57namespace ana {
58
59/* class region_model_manager. */
60
61/* region_model_manager's ctor. */
62
region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_next_symbol_id (0),
  m_empty_call_string (),
  m_root_region (alloc_symbol_id ()),
  m_stack_region (alloc_symbol_id (), &m_root_region),
  m_heap_region (alloc_symbol_id (), &m_root_region),
  m_unknown_NULL (NULL),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_symbol_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_symbol_id (), &m_root_region),
  m_globals_map (),
  m_thread_local_region (alloc_symbol_id (), &m_root_region),
  m_errno_region (alloc_symbol_id (), &m_thread_local_region),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}

/* region_model_manager's dtor.  Delete all of the managed svalues
   and regions.  */

region_model_manager::~region_model_manager ()
{
  /* Delete consolidated svalues.  */
  for (constants_map_t::iterator iter = m_constants_map.begin ();
       iter != m_constants_map.end (); ++iter)
    delete (*iter).second;
  for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
       iter != m_unknowns_map.end (); ++iter)
    delete (*iter).second;
  delete m_unknown_NULL;
  for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
       iter != m_poisoned_values_map.end (); ++iter)
    delete (*iter).second;
  for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
       iter != m_setjmp_values_map.end (); ++iter)
    delete (*iter).second;
  for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
       iter != m_initial_values_map.end (); ++iter)
    delete (*iter).second;
  for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
       iter != m_pointer_values_map.end (); ++iter)
    delete (*iter).second;
  for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
       iter != m_unaryop_values_map.end (); ++iter)
    delete (*iter).second;
  for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
       iter != m_binop_values_map.end (); ++iter)
    delete (*iter).second;
  for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
       iter != m_sub_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_repeated_values_map)
    delete iter.second;
  for (auto iter : m_bits_within_values_map)
    delete iter.second;
  for (unmergeable_values_map_t::iterator iter
	 = m_unmergeable_values_map.begin ();
       iter != m_unmergeable_values_map.end (); ++iter)
    delete (*iter).second;
  for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
       iter != m_widening_values_map.end (); ++iter)
    delete (*iter).second;
  for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
       iter != m_compound_values_map.end (); ++iter)
    delete (*iter).second;
  for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
       iter != m_conjured_values_map.end (); ++iter)
    delete (*iter).second;
  for (auto iter : m_asm_output_values_map)
    delete iter.second;
  for (auto iter : m_const_fn_result_values_map)
    delete iter.second;

  /* Delete consolidated regions.  */
  for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
       iter != m_fndecls_map.end (); ++iter)
    delete (*iter).second;
  for (labels_map_t::iterator iter = m_labels_map.begin ();
       iter != m_labels_map.end (); ++iter)
    delete (*iter).second;
  for (globals_map_t::iterator iter = m_globals_map.begin ();
       iter != m_globals_map.end (); ++iter)
    delete (*iter).second;
  for (string_map_t::iterator iter = m_string_map.begin ();
       iter != m_string_map.end (); ++iter)
    delete (*iter).second;

  delete m_range_mgr;
}

/* Return true if C exceeds the complexity limit for svalues.  */

bool
region_model_manager::too_complex_p (const complexity &c) const
{
  if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
    return true;
  return false;
}

/* If SVAL exceeds the complexity limit for svalues, delete it
   and return true.
   Otherwise update m_max_complexity and return false.  */

bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  if (m_checking_feasibility)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  sval->dump_to_pp (&pp, true);
  if (warning_at (input_location, OPT_Wanalyzer_symbol_too_complex,
		  "symbol too complicated: %qs",
		  pp_formatted_text (&pp)))
    inform (input_location,
	    "max_depth %i exceeds --param=analyzer-max-svalue-depth=%i",
	    c.m_max_depth, param_analyzer_max_svalue_depth);

  delete sval;
  return true;
}

/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL)		\
  do {							\
    svalue *sval_ = (SVAL);				\
    tree type_ = sval_->get_type ();			\
    if (reject_if_too_complex (sval_))			\
      return get_or_create_unknown_svalue (type_);	\
  } while (0)

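/* Usage sketch (illustrative): each get_or_create_* member function
   below guards its newly-allocated svalue with this macro, e.g.:

     foo_svalue *sval = new foo_svalue (alloc_symbol_id (), type, ...);
     RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
     m_foo_map.put (key, sval);
     return sval;

   where "foo_svalue" stands in for any of the concrete svalue classes.  */
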
/* svalue consolidation.  */

/* Return the svalue * for a constant_svalue for CST_EXPR,
   creating it if necessary.
   The constant_svalue instances are reused, based on pointer equality
   of trees.  */

const svalue *
region_model_manager::get_or_create_constant_svalue (tree cst_expr)
{
  gcc_assert (cst_expr);
  gcc_assert (CONSTANT_CLASS_P (cst_expr));

  constant_svalue **slot = m_constants_map.get (cst_expr);
  if (slot)
    return *slot;
  constant_svalue *cst_sval
    = new constant_svalue (alloc_symbol_id (), cst_expr);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
  m_constants_map.put (cst_expr, cst_sval);
  return cst_sval;
}

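/* Consolidation example (an illustrative sketch, with a hypothetical
   manager "mgr" and tree constant "cst"): repeated lookups return the
   same instance, so svalues can be compared by pointer equality:

     const svalue *a = mgr.get_or_create_constant_svalue (cst);
     const svalue *b = mgr.get_or_create_constant_svalue (cst);
     gcc_assert (a == b);  */
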
/* Return the svalue * for a constant_svalue for the INTEGER_CST
   for CST of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_int_cst (tree type,
					     const poly_wide_int_ref &cst)
{
  gcc_assert (type);
  gcc_assert (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type));
  tree tree_cst = wide_int_to_tree (type, cst);
  return get_or_create_constant_svalue (tree_cst);
}

/* Return the svalue * for the constant_svalue for the NULL pointer
   of POINTER_TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_null_ptr (tree pointer_type)
{
  gcc_assert (pointer_type);
  gcc_assert (POINTER_TYPE_P (pointer_type));
  return get_or_create_int_cst (pointer_type, 0);
}

/* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
   creating it if necessary.
   The unknown_svalue instances are reused, based on pointer equality
   of the types.  */

const svalue *
region_model_manager::get_or_create_unknown_svalue (tree type)
{
  /* Don't create unknown values when doing feasibility testing;
     instead, create a unique svalue.  */
  if (m_checking_feasibility)
    return create_unique_svalue (type);

  /* Special-case NULL, so that the hash_map can use NULL as the
     "empty" value.  */
  if (type == NULL_TREE)
    {
      if (!m_unknown_NULL)
	m_unknown_NULL = new unknown_svalue (alloc_symbol_id (), type);
      return m_unknown_NULL;
    }

  unknown_svalue **slot = m_unknowns_map.get (type);
  if (slot)
    return *slot;
  unknown_svalue *sval = new unknown_svalue (alloc_symbol_id (), type);
  m_unknowns_map.put (type, sval);
  return sval;
}

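/* Note (an illustrative sketch): all unknown svalues of a given type are
   therefore a single instance, e.g.:

     mgr.get_or_create_unknown_svalue (integer_type_node)
       == mgr.get_or_create_unknown_svalue (integer_type_node)

   so distinct unknown values can't be told apart; feasibility checking
   sidesteps this by calling create_unique_svalue instead (below).  */
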
/* Return a freshly-allocated svalue of TYPE, owned by this manager.  */

const svalue *
region_model_manager::create_unique_svalue (tree type)
{
  svalue *sval = new placeholder_svalue (alloc_symbol_id (), type, "unique");
  m_managed_dynamic_svalues.safe_push (sval);
  return sval;
}

/* Return the svalue * for the initial value of REG, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_initial_value (const region *reg,
						   bool check_poisoned)
{
  if (!reg->can_have_initial_svalue_p () && check_poisoned)
    return get_or_create_poisoned_svalue (POISON_KIND_UNINIT,
					  reg->get_type ());

  /* The initial value of a cast is a cast of the initial value.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    {
      const region *original_reg = cast_reg->get_original_region ();
      return get_or_create_cast (cast_reg->get_type (),
				 get_or_create_initial_value (original_reg));
    }

  /* Simplify:
       INIT_VAL(ELEMENT_REG(STRING_REG, CONSTANT_SVAL))
     to:
       CONSTANT_SVAL(STRING[N]).  */
  if (const element_region *element_reg = reg->dyn_cast_element_region ())
    if (tree cst_idx = element_reg->get_index ()->maybe_get_constant ())
      if (const string_region *string_reg
	    = element_reg->get_parent_region ()->dyn_cast_string_region ())
	if (tree_fits_shwi_p (cst_idx))
	  {
	    HOST_WIDE_INT idx = tree_to_shwi (cst_idx);
	    tree string_cst = string_reg->get_string_cst ();
	    if (idx >= 0 && idx < TREE_STRING_LENGTH (string_cst))
	      {
		int ch = TREE_STRING_POINTER (string_cst)[idx];
		return get_or_create_int_cst (reg->get_type (), ch);
	      }
	  }

  /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL.  */
  if (reg->symbolic_for_unknown_ptr_p ())
    return get_or_create_unknown_svalue (reg->get_type ());

  if (initial_svalue **slot = m_initial_values_map.get (reg))
    return *slot;
  initial_svalue *initial_sval
    = new initial_svalue (alloc_symbol_id (), reg->get_type (), reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
  m_initial_values_map.put (reg, initial_sval);
  return initial_sval;
}

/* Return the svalue * for R using type TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
						   tree type)
{
  setjmp_svalue::key_t key (r, type);
  if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
    return *slot;
  setjmp_svalue *setjmp_sval = new setjmp_svalue (r, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
  m_setjmp_values_map.put (key, setjmp_sval);
  return setjmp_sval;
}

/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
						     tree type)
{
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval
    = new poisoned_svalue (kind, alloc_symbol_id (), type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}

/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
{
  /* If this is a symbolic region from dereferencing a pointer, and the types
     match, then return the original pointer.  */
  if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
    if (ptr_type == sym_reg->get_pointer ()->get_type ())
      return sym_reg->get_pointer ();

  region_svalue::key_t key (ptr_type, pointee);
  if (region_svalue **slot = m_pointer_values_map.get (key))
    return *slot;
  region_svalue *sval
    = new region_svalue (alloc_symbol_id (), ptr_type, pointee);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
  m_pointer_values_map.put (key, sval);
  return sval;
}

/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned".  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))"
	     => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    tree inner_type = arg->get_type ();
	    if (TYPE_SIZE (type)
		&& TYPE_SIZE (inner_type)
		&& (fold_binary (LE_EXPR, boolean_type_node,
				 TYPE_SIZE (type), TYPE_SIZE (inner_type))
		    == boolean_true_node))
	      return maybe_fold_unaryop (type, op, innermost_arg);
	  }
	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    case NEGATE_EXPR:
      {
	/* -(-(VAL)) is VAL, for integer types.  */
	if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
	  if (unaryop->get_op () == NEGATE_EXPR
	      && type == unaryop->get_type ()
	      && type
	      && INTEGRAL_TYPE_P (type))
	    return unaryop->get_arg ();
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      {
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);

	/* fold_unary can return casts of constants; try to handle them.  */
	if (op != NOP_EXPR
	    && type
	    && TREE_CODE (result) == NOP_EXPR
	    && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	  {
	    const svalue *inner_cst
	      = get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	    return get_or_create_cast (type,
				       get_or_create_cast (TREE_TYPE (result),
							   inner_cst));
	  }
      }

  return NULL;
}

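/* For example (illustrative): given "int i", the folds above treat
   "(int)(long)i" as just "i" (the widening cast is undone, since int
   is no wider than long), whereas "(int)(char)i" keeps the truncation
   through char, since char is narrower than int.  */
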
/* Return the svalue * for a unary operation OP on ARG with a result of
   type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
					     const svalue *arg)
{
  if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
    return folded;
  unaryop_svalue::key_t key (type, op, arg);
  if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
    return *slot;
  unaryop_svalue *unaryop_sval
    = new unaryop_svalue (alloc_symbol_id (), type, op, arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
  m_unaryop_values_map.put (key, unaryop_sval);
  return unaryop_sval;
}

/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
   Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
   of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
   and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
   on.  */

static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
  gcc_assert (dst_type);
  if (!src_type)
    return NOP_EXPR;

  if (SCALAR_FLOAT_TYPE_P (src_type))
    {
      if (TREE_CODE (dst_type) == INTEGER_TYPE)
	return FIX_TRUNC_EXPR;
      else
	return VIEW_CONVERT_EXPR;
    }

  return NOP_EXPR;
}

/* Return the svalue * for a cast of ARG to type TYPE, creating it
   if necessary.  */

const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
  gcc_assert (type);

  /* No-op if the types are the same.  */
  if (type == arg->get_type ())
    return arg;

  /* Don't attempt to handle casts involving vector types for now.  */
  if (VECTOR_TYPE_P (type)
      || (arg->get_type ()
	  && VECTOR_TYPE_P (arg->get_type ())))
    return get_or_create_unknown_svalue (type);

  enum tree_code op = get_code_for_cast (type, arg->get_type ());
  return get_or_create_unaryop (type, op, arg);
}

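/* Dispatch sketch (illustrative, with a hypothetical svalue "dbl_sval"
   of type double):

     mgr.get_or_create_cast (integer_type_node, dbl_sval);
       // uses FIX_TRUNC_EXPR (float type -> integer type)
     mgr.get_or_create_cast (float_type_node, dbl_sval);
       // uses VIEW_CONVERT_EXPR (float type -> non-integer type)

   all other combinations, including casts from untyped svalues, use
   NOP_EXPR, per get_code_for_cast above.  */
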
/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return NULL.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  if (type != unsigned_char_type_node)
    return NULL;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return NULL;

  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return NULL;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}

/* Subroutine of region_model_manager::get_or_create_binop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
					const svalue *arg0,
					const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (tree result = fold_binary (op, type, cst0, cst1))
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);
    }

  if ((type && FLOAT_TYPE_P (type))
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1))
	return get_or_create_cast (type, arg0);
      /* (0 - VAL) -> -VAL.  */
      if (cst0 && zerop (cst0))
	return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
      /* (X + Y) - X -> Y.  */
      if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
	if (binop->get_op () == PLUS_EXPR)
	  if (binop->get_arg0 () == arg1)
	    return get_or_create_cast (type, binop->get_arg1 ());
      break;
    case MULT_EXPR:
      /* (VAL * 0).  */
      if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
	return get_or_create_constant_svalue (build_int_cst (type, 0));
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
	/* TODO: we ought to have a cast to TYPE here, but doing so introduces
	   regressions; see PR analyzer/110902.  */
	return arg0;
      break;
    case BIT_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 & 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));

	  if (const compound_svalue *compound_sval
		= arg0->dyn_cast_compound_svalue ())
	    if (const svalue *sval
		  = maybe_undo_optimize_bit_field_compare (type,
							   compound_sval,
							   cst1, arg1))
	      return sval;
	}
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then...  */
	  /* ..."(1 & x) -> x".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x & 1) -> x".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_cast (type, arg0);
	  /* ..."(0 & x) -> 0".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_int_cst (type, 0);
	  /* ..."(x & 0) -> 0".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_int_cst (type, 0);
	}
      break;
    case BIT_IOR_EXPR:
      if (arg0->get_type () == boolean_type_node
	  && arg1->get_type () == boolean_type_node)
	{
	  /* If both operands are _Bool, then...  */
	  /* ..."(1 | x) -> 1".  */
	  if (cst0 && !zerop (cst0))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(x | 1) -> 1".  */
	  if (cst1 && !zerop (cst1))
	    return get_or_create_int_cst (type, 1);
	  /* ..."(0 | x) -> x".  */
	  if (cst0 && zerop (cst0))
	    return get_or_create_cast (type, arg1);
	  /* ..."(x | 0) -> x".  */
	  if (cst1 && zerop (cst1))
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 && 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));
	  else
	    /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
	{
	  if (zerop (cst1))
	    /* "(ARG0 || 0)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	  else
	    /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
	    return get_or_create_cast (type, arg1);
	}
      break;
    }

  /* For associative ops, fold "(X op CST_A) op CST_B)" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
	  && binop->get_arg1 ()->maybe_get_constant ())
	return get_or_create_binop
	  (type, op, binop->get_arg0 (),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "(PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
	if (binop->get_arg1 ()->maybe_get_constant ())
	  return get_or_create_binop
	    (type, op, binop->get_arg0 (),
	     get_or_create_binop (size_type_node, op,
				  binop->get_arg1 (), arg1));

  /* Distribute multiplication by a constant through addition/subtraction:
     (X + Y) * CST => (X * CST) + (Y * CST).  */
  if (cst1 && op == MULT_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == PLUS_EXPR
	  || binop->get_op () == MINUS_EXPR)
	{
	  return get_or_create_binop
	    (type, binop->get_op (),
	     get_or_create_binop (type, op,
				  binop->get_arg0 (), arg1),
	     get_or_create_binop (type, op,
				  binop->get_arg1 (), arg1));
	}

  /* etc.  */

  return NULL;
}

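/* For example (illustrative): the folds above canonicalize
   "(x + 1) + 2" to "x + 3", "(p ptr+ 4) ptr+ 8" to "p ptr+ 12", and
   "(x + y) * 2" to "(x * 2) + (y * 2)", keeping symbolic expressions
   in a normal form so that equal values consolidate to the same
   svalue instance.  */
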
/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     them via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval
    = new binop_svalue (alloc_symbol_id (), type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}

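/* Canonicalization example (illustrative, with hypothetical svalues
   "x_sval" and "one_sval" standing for "x" and 1): since PLUS_EXPR is
   commutative, both operand orders hit the same cache entry:

     mgr.get_or_create_binop (type, PLUS_EXPR, one_sval, x_sval)
       == mgr.get_or_create_binop (type, PLUS_EXPR, x_sval, one_sval)  */
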
/* Subroutine of region_model_manager::get_or_create_sub_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
	= parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      {
	/* If we have a concrete 1-byte access within the parent region... */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the STRING_CST.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		  = maybe_get_char_from_string_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  if (const initial_svalue *init_sval
	= parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r))[ELEMENT] -> INIT(r[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  if (const repeated_svalue *repeated_sval
	= parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  return NULL;
}

/* Return the svalue * for extracting a subvalue of type TYPE from
   PARENT_SVALUE based on SUBREGION, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  if (const svalue *folded
	= maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (alloc_symbol_id (), type, parent_svalue, subregion);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}

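/* For example (illustrative): reading field "f" of a never-written
   struct "s" folds Subvalue(InitialValue(s), s.f) to
   InitialValue(s.f), and reading byte 1 of the string literal "foo"
   folds to the character constant 'o'.  */
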
/* Subroutine of region_model_manager::get_or_create_repeated_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
    {
      HOST_WIDE_INT num_bytes_inner_svalue
	= int_size_in_bytes (inner_svalue->get_type ());
      if (num_bytes_inner_svalue != -1)
	if (num_bytes_inner_svalue
	    == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Handle zero-fill of a specific type.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  return NULL;
}

/* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
   enough times to be of size OUTER_SIZE, creating it if necessary.
   e.g. for filling buffers with a constant value.  */

const svalue *
region_model_manager::get_or_create_repeated_svalue (tree type,
						     const svalue *outer_size,
						     const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
    return folded;

  repeated_svalue::key_t key (type, outer_size, inner_svalue);
  if (repeated_svalue **slot = m_repeated_values_map.get (key))
    return *slot;
  repeated_svalue *repeated_sval
    = new repeated_svalue (alloc_symbol_id (), type, outer_size, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
  m_repeated_values_map.put (key, repeated_sval);
  return repeated_sval;
}

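/* Usage sketch (illustrative): a call such as "memset (buf, 0, 1024)"
   can be modeled by binding
     REPEATED(outer_size: 1024, inner_svalue: (char)0)
   into the region for "buf"; the folds above then collapse a later
   read of any single element back to the constant zero.  */
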
/* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_bit_range_for_field (tree field, bit_range *out)
{
  bit_size_t bit_size;
  if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
    return false;
  int field_bit_offset = int_bit_position (field);
  *out = bit_range (field_bit_offset, bit_size);
  return true;
}

/* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
   Return true and write the result to OUT if successful.
   Return false otherwise.  */

static bool
get_byte_range_for_field (tree field, byte_range *out)
{
  bit_range field_bits (0, 0);
  if (!get_bit_range_for_field (field, &field_bits))
    return false;
  return field_bits.as_byte_range (out);
}

/* Attempt to determine if there is a specific field within RECORD_TYPE
   at BYTES.  If so, return it, and write the location of BYTES relative
   to the field to *OUT_RANGE_WITHIN_FIELD.
   Otherwise, return NULL_TREE.
   For example, given:
     struct foo { uint32 a; uint32 b; };
   and
     bytes = {bytes 6-7} (of foo)
   we have bytes 2-3 of field b.  */

static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  byte_range field_bytes (0, 0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within field_bytes?  */
  byte_range bytes_within_field (0, 0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  *out_range_within_field = bytes_within_field;
  return field;
}

/* Subroutine of region_model_manager::get_or_create_bits_within.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN ((0, sizeof (VAL), VAL))
     to:
       CAST(TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding.  */
  if (const svalue *sval
	= inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  byte_range bytes (0, 0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if the range is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
			= inner_svalue->dyn_cast_initial_svalue ())
		    {
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BYTES_WITHIN (range, KIND(REG))
	     to:
	       BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if the range is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		    = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  return NULL;
}

/* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_bits_within (tree type,
						 const bit_range &bits,
						 const svalue *inner_svalue)
{
  if (const svalue *folded
	= maybe_fold_bits_within_svalue (type, bits, inner_svalue))
    return folded;

  bits_within_svalue::key_t key (type, bits, inner_svalue);
  if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
    return *slot;
  bits_within_svalue *bits_within_sval
    = new bits_within_svalue (alloc_symbol_id (), type, bits, inner_svalue);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
  m_bits_within_values_map.put (key, bits_within_sval);
  return bits_within_sval;
}

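/* For example (illustrative): extracting bytes 6-7 of the initial value
   of a "struct foo { uint32 a; uint32 b; }" becomes bytes 2-3 of
   INIT_VAL(the foo's field b), via the RECORD_TYPE case above and
   get_field_at_byte_range.  */
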
/* Return the svalue * that decorates ARG as being unmergeable,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unmergeable (const svalue *arg)
{
  if (arg->get_kind () == SK_UNMERGEABLE)
    return arg;

  if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
    return *slot;
  unmergeable_svalue *unmergeable_sval
    = new unmergeable_svalue (alloc_symbol_id (), arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
  m_unmergeable_values_map.put (arg, unmergeable_sval);
  return unmergeable_sval;
}

/* Return the svalue * of type TYPE for the merger of value BASE_SVAL
   and ITER_SVAL at POINT, creating it if necessary.  */

const svalue *
region_model_manager::
get_or_create_widening_svalue (tree type,
			       const function_point &point,
			       const svalue *base_sval,
			       const svalue *iter_sval)
{
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  widening_svalue::key_t key (type, point, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (alloc_symbol_id (), type, point, base_sval,
			   iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}

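/* For example (illustrative): when iterating "for (i = 0; i < n; i++)"
   to a fixed point, the analyzer sees i == 0 on the first visit to the
   loop head and i == 1 on the second; rather than enumerating every
   value it widens to WIDENING(loop head, base: 0, iter: 1), standing
   for the whole sequence 0, 1, 2, ...  */
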
/* Return the svalue * of type TYPE for the compound values in MAP,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const binding_map &map)
{
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (alloc_symbol_id (), type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}

/* class conjured_purge.  */

/* Purge state relating to SVAL.  */

void
conjured_purge::purge (const conjured_svalue *sval) const
{
  m_model->purge_state_involving (sval, m_ctxt);
}

/* Return the svalue * of type TYPE for the value conjured for ID_REG
   at STMT (using IDX for any further disambiguation),
   creating it if necessary.
   Use P to purge existing state from the svalue, for the case where a
   conjured_svalue would be reused along an execution path.  */

const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg,
						     const conjured_purge &p,
						     unsigned idx)
{
  conjured_svalue::key_t key (type, stmt, id_reg, idx);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    {
      const conjured_svalue *sval = *slot;
      /* We're reusing an existing conjured_svalue, perhaps from a different
	 state within this analysis, or perhaps from an earlier state on this
	 execution path.  For the latter, purge any state involving the "new"
	 svalue from the current program_state.  */
      p.purge (sval);
      return sval;
    }
  conjured_svalue *conjured_sval
    = new conjured_svalue (alloc_symbol_id (), type, stmt, id_reg, idx);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}

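/* For example (illustrative): at a call "n = get_len ();" whose body is
   unknown, the analyzer conjures CONJURED(call stmt, region for "n") as
   the value written to "n".  If the path re-executes the call (e.g. in
   a loop), the same conjured_svalue is returned, and P purges stale
   facts about it (such as a constraint "n > 0" from the previous
   iteration) from the current state.  */
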
/* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::
maybe_fold_asm_output_svalue (tree type,
			      const vec<const svalue *> &inputs)
{
  /* Unknown inputs should lead to unknown results.  */
  for (const auto &iter : inputs)
    if (iter->get_kind () == SK_UNKNOWN)
      return get_or_create_unknown_svalue (type);

  return NULL;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
   asm stmt ASM_STMT, given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     noutputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
   asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
   INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const char *asm_string,
				 unsigned output_idx,
				 unsigned num_outputs,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     num_outputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}

/* Return the svalue * of type TYPE for the result of a call to FNDECL
   with __attribute__((const)), given INPUTS as inputs.  */

const svalue *
region_model_manager::
get_or_create_const_fn_result_svalue (tree type,
				      tree fndecl,
				      const vec<const svalue *> &inputs)
{
  gcc_assert (type);
  gcc_assert (fndecl);
  gcc_assert (DECL_P (fndecl));
  gcc_assert (TREE_READONLY (fndecl));
  gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);

  const_fn_result_svalue::key_t key (type, fndecl, inputs);
  if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
    return *slot;
  const_fn_result_svalue *const_fn_result_sval
    = new const_fn_result_svalue (alloc_symbol_id (), type, fndecl, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
  m_const_fn_result_values_map.put (key, const_fn_result_sval);
  return const_fn_result_sval;
}

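/* For example (illustrative): given
     extern int sq (int) __attribute__((const));
   two calls "sq (x)" with the same svalue for "x" map to the same
   CONST_FN_RESULT(sq, {x}) instance, so the analyzer can treat the two
   results as equal without knowing the body of "sq".  */
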
/* Given STRING_CST, a STRING_CST, and BYTE_OFFSET_CST, a constant byte
   offset, attempt to get the character at that offset, returning either
   the svalue for the character constant, or NULL if unsuccessful.  */

const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && compare_tree_int (byte_offset_cst,
			   TREE_STRING_LENGTH (string_cst)) < 0
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
			      (TREE_STRING_POINTER (string_cst)
			       [TREE_INT_CST_LOW (byte_offset_cst)]));
      return get_or_create_constant_svalue (char_cst);
    }
  return NULL;
}

/* region consolidation.  */

/* Return the region for FNDECL, creating it if necessary.  */

const function_region *
region_model_manager::get_region_for_fndecl (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);

  function_region **slot = m_fndecls_map.get (fndecl);
  if (slot)
    return *slot;
  function_region *reg
    = new function_region (alloc_symbol_id (), &m_code_region, fndecl);
  m_fndecls_map.put (fndecl, reg);
  return reg;
}

/* Return the region for LABEL, creating it if necessary.  */

const label_region *
region_model_manager::get_region_for_label (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  label_region **slot = m_labels_map.get (label);
  if (slot)
    return *slot;

  tree fndecl = DECL_CONTEXT (label);
  gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);

  const function_region *func_reg = get_region_for_fndecl (fndecl);
  label_region *reg
    = new label_region (alloc_symbol_id (), func_reg, label);
  m_labels_map.put (label, reg);
  return reg;
}

/* Return the region for EXPR, creating it if necessary.  */

const decl_region *
region_model_manager::get_region_for_global (tree expr)
{
  gcc_assert (VAR_P (expr));

  decl_region **slot = m_globals_map.get (expr);
  if (slot)
    return *slot;
  decl_region *reg
    = new decl_region (alloc_symbol_id (), &m_globals_region, expr);
  m_globals_map.put (expr, reg);
  return reg;
}

/* Return the region for an unknown access of type REGION_TYPE,
   creating it if necessary.
   This is a symbolic_region, where the pointer is an unknown_svalue
   of type &REGION_TYPE.  */

const region *
region_model_manager::get_unknown_symbolic_region (tree region_type)
{
  tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
  const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
  return get_symbolic_region (unknown_ptr);
}

/* Return the region that describes accessing field FIELD of PARENT,
   creating it if necessary.  */

const region *
region_model_manager::get_field_region (const region *parent, tree field)
{
  gcc_assert (TREE_CODE (field) == FIELD_DECL);

  /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (TREE_TYPE (field));

  field_region::key_t key (parent, field);
  if (field_region *reg = m_field_regions.get (key))
    return reg;

  field_region *field_reg
    = new field_region (alloc_symbol_id (), parent, field);
  m_field_regions.put (key, field_reg);
  return field_reg;
}

/* Return the region that describes accessing the element of type
   ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary.  */

const region *
region_model_manager::get_element_region (const region *parent,
					  tree element_type,
					  const svalue *index)
{
  /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (element_type);

  element_region::key_t key (parent, element_type, index);
  if (element_region *reg = m_element_regions.get (key))
    return reg;

  element_region *element_reg
    = new element_region (alloc_symbol_id (), parent, element_type, index);
  m_element_regions.put (key, element_reg);
  return element_reg;
}

/* Return the region that describes accessing the subregion of type
   TYPE at offset BYTE_OFFSET within PARENT, creating it if
   necessary.  */

const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* If BYTE_OFFSET is zero, return PARENT (viewed as TYPE).  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to OFFSET_REGION(REG, (X + Y)).  */
  if (const offset_region *parent_offset_reg
	= parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_symbol_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}

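/* For example (illustrative): the fold above turns
   OFFSET_REGION(OFFSET_REGION(REG, 4), 8) into OFFSET_REGION(REG, 12),
   so pointer arithmetic done in several steps reaches the same region
   (and hence the same bindings) as a single combined step.  */
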
e61ffa20
DM
1584/* Return the region that describes accessing the subregion of type
1585 TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary. */
1586
1587const region *
1588region_model_manager::get_sized_region (const region *parent,
1589 tree type,
1590 const svalue *byte_size_sval)
1591{
3d41408c
DM
1592 if (parent->symbolic_for_unknown_ptr_p ())
1593 return get_unknown_symbolic_region (type);
1594
e61ffa20
DM
1595 if (byte_size_sval->get_type () != size_type_node)
1596 byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);
1597
1598 /* If PARENT is already that size, return it. */
1599 const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
1600 if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
1601 if (tree size_cst = byte_size_sval->maybe_get_constant ())
1602 {
1603 tree comparison
1604 = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
1605 if (comparison == boolean_true_node)
1606 return parent;
1607 }
1608
1609 sized_region::key_t key (parent, type, byte_size_sval);
1610 if (sized_region *reg = m_sized_regions.get (key))
1611 return reg;
1612
1613 sized_region *sized_reg
9d804f9b 1614 = new sized_region (alloc_symbol_id (), parent, type, byte_size_sval);
e61ffa20
DM
1615 m_sized_regions.put (key, sized_reg);
1616 return sized_reg;
1617}
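
/* Editor's note: a hypothetical example of the shortcut above (not part
   of the original file): if PARENT is a decl_region for "char buf[16]"
   and BYTE_SIZE_SVAL is a constant_svalue for (size_t)16, the two
   constant sizes compare equal and PARENT itself is returned, with no
   sized_region being created.  */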

/* Return the region that describes accessing ORIGINAL_REGION as if
   it were of type TYPE, creating it if necessary.  */

const region *
region_model_manager::get_cast_region (const region *original_region,
				       tree type)
{
  /* If types match, return ORIGINAL_REGION.  */
  if (type == original_region->get_type ())
    return original_region;

  if (original_region->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  cast_region::key_t key (original_region, type);
  if (cast_region *reg = m_cast_regions.get (key))
    return reg;

  cast_region *cast_reg
    = new cast_region (alloc_symbol_id (), original_region, type);
  m_cast_regions.put (key, cast_reg);
  return cast_reg;
}
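
/* Editor's note: an illustrative sketch (not part of the original file;
   "mgr" and "reg" are hypothetical locals).  Assuming reg->get_type ()
   is already integer_type_node, the call is an identity:

     gcc_assert (mgr.get_cast_region (reg, integer_type_node) == reg);

   Otherwise a consolidated cast_region wrapping "reg" is returned.  */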

/* Return the frame_region for call to FUN from CALLING_FRAME, creating it
   if necessary.  CALLING_FRAME may be NULL.  */

const frame_region *
region_model_manager::get_frame_region (const frame_region *calling_frame,
					function *fun)
{
  int index = calling_frame ? calling_frame->get_index () + 1 : 0;

  frame_region::key_t key (calling_frame, fun);
  if (frame_region *reg = m_frame_regions.get (key))
    return reg;

  frame_region *frame_reg
    = new frame_region (alloc_symbol_id (), &m_stack_region, calling_frame,
			fun, index);
  m_frame_regions.put (key, frame_reg);
  return frame_reg;
}
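
/* Editor's note: an illustrative sketch of frame indices (not part of
   the original file; "mgr", "fun_main" and "fun_callee" are hypothetical
   locals):

     const frame_region *outer = mgr.get_frame_region (NULL, fun_main);
     const frame_region *inner = mgr.get_frame_region (outer, fun_callee);
     // outer->get_index () == 0; inner->get_index () == 1

   so each frame's index records its depth within the stack.  */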

/* Return the region that describes dereferencing SVAL, creating it
   if necessary.  */

const region *
region_model_manager::get_symbolic_region (const svalue *sval)
{
  symbolic_region::key_t key (&m_root_region, sval);
  if (symbolic_region *reg = m_symbolic_regions.get (key))
    return reg;

  symbolic_region *symbolic_reg
    = new symbolic_region (alloc_symbol_id (), &m_root_region, sval);
  m_symbolic_regions.put (key, symbolic_reg);
  return symbolic_reg;
}
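
/* Editor's note: an illustrative sketch (not part of the original file;
   "mgr" and "ptr_sval" are hypothetical locals).  Given some svalue of
   pointer type, the region it points to is modelled as:

     const region *deref_reg = mgr.get_symbolic_region (ptr_sval);

   i.e. a symbolic_region is the analyzer's representation of "*PTR"
   when PTR's referent is not otherwise known.  */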

/* Return the region that describes accessing STRING_CST, creating it
   if necessary.  */

const string_region *
region_model_manager::get_region_for_string (tree string_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  string_region **slot = m_string_map.get (string_cst);
  if (slot)
    return *slot;
  string_region *reg
    = new string_region (alloc_symbol_id (), &m_root_region, string_cst);
  m_string_map.put (string_cst, reg);
  return reg;
}
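
/* Editor's note: an illustrative sketch (not part of the original file;
   "mgr" and "str_cst" are hypothetical locals, with "str_cst" being a
   STRING_CST such as the constant for "hello"):

     const string_region *str_reg = mgr.get_region_for_string (str_cst);

   Repeated calls with the same STRING_CST return the same region, via
   m_string_map.  */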

/* Return the region that describes accessing BITS within PARENT as TYPE,
   creating it if necessary.  */

const region *
region_model_manager::get_bit_range (const region *parent, tree type,
				     const bit_range &bits)
{
  gcc_assert (parent);

  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  bit_range_region::key_t key (parent, type, bits);
  if (bit_range_region *reg = m_bit_range_regions.get (key))
    return reg;

  bit_range_region *bit_range_reg
    = new bit_range_region (alloc_symbol_id (), parent, type, bits);
  m_bit_range_regions.put (key, bit_range_reg);
  return bit_range_reg;
}
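
/* Editor's note: an illustrative sketch (not part of the original file;
   "mgr", "parent" and "type" are hypothetical locals).  To model e.g. a
   bit-field occupying bits 3..6 within PARENT:

     bit_range bits (3, 4);  // start bit 3, 4 bits wide
     const region *bf_reg = mgr.get_bit_range (parent, type, bits);

   assuming bit_range's (start, size) constructor from the store code.  */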

/* Return the region that describes accessing the IDX-th variadic argument
   within PARENT_FRAME, creating it if necessary.  */

const var_arg_region *
region_model_manager::get_var_arg_region (const frame_region *parent_frame,
					  unsigned idx)
{
  gcc_assert (parent_frame);

  var_arg_region::key_t key (parent_frame, idx);
  if (var_arg_region *reg = m_var_arg_regions.get (key))
    return reg;

  var_arg_region *var_arg_reg
    = new var_arg_region (alloc_symbol_id (), parent_frame, idx);
  m_var_arg_regions.put (key, var_arg_reg);
  return var_arg_reg;
}
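
/* Editor's note: an illustrative sketch (not part of the original file;
   "mgr" and "frame" are hypothetical locals).  When modelling va_arg,
   the IDX-th variadic argument of the frame is:

     const var_arg_region *arg0 = mgr.get_var_arg_region (frame, 0);
     const var_arg_region *arg1 = mgr.get_var_arg_region (frame, 1);

   with each (frame, index) pair consolidated to a single region.  */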

/* If we see a tree code we don't know how to handle, rather than
   ICEing or generating bogus results, create a dummy region, and notify
   CTXT so that it can mark the new state as being not properly
   modelled.  The exploded graph can then stop exploring that path,
   since any diagnostics we might issue will have questionable
   validity.  */

const region *
region_model_manager::
get_region_for_unexpected_tree_code (region_model_context *ctxt,
				     tree t,
				     const dump_location_t &loc)
{
  tree type = TYPE_P (t) ? t : TREE_TYPE (t);
  region *new_reg
    = new unknown_region (alloc_symbol_id (), &m_root_region, type);
  if (ctxt)
    ctxt->on_unexpected_tree_code (t, loc);
  return new_reg;
}

/* Return a region describing a heap-allocated block of memory.
   Reuse an existing heap_allocated_region if its id is not within
   BASE_REGS_IN_USE.  */

const region *
region_model_manager::
get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use)
{
  /* Try to reuse an existing region, if it's unreferenced in the
     client state.  */
  for (auto existing_reg : m_managed_dynamic_regions)
    if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
      if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
	return existing_reg;

  /* All existing ones (if any) are in use; create a new one.  */
  region *reg
    = new heap_allocated_region (alloc_symbol_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}
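
/* Editor's note: an illustrative sketch of the reuse policy above (not
   part of the original file; "mgr" is a hypothetical local):

     auto_bitmap base_regs_in_use;
     const region *r1
       = mgr.get_or_create_region_for_heap_alloc (base_regs_in_use);
     // With r1's id absent from the bitmap, a second call may return r1:
     const region *r2
       = mgr.get_or_create_region_for_heap_alloc (base_regs_in_use);

   Callers therefore populate BASE_REGS_IN_USE with the ids of regions
   still referenced by the state being analyzed.  */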

/* Return a new region describing a block of memory allocated within FRAME.  */

const region *
region_model_manager::create_region_for_alloca (const frame_region *frame)
{
  gcc_assert (frame);
  region *reg = new alloca_region (alloc_symbol_id (), frame);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}
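
/* Editor's note: an illustrative contrast with the heap case above (not
   part of the original file; "mgr" and "frame" are hypothetical locals):

     const region *a1 = mgr.create_region_for_alloca (frame);
     const region *a2 = mgr.create_region_for_alloca (frame);
     // a1 != a2: alloca regions are never consolidated or reused.

   Each call allocates a fresh region associated with FRAME.  */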

/* Log OBJ to LOGGER.  */

template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}

/* Specialization for frame_region, which also logs the count of locals
   managed by the frame_region.  */

template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}

/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  logger->log ("  # %s: %li", title, (long)uniq_map.elements ());
  if (!show_objs)
    return;
  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects that were managed by MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const consolidation_map<T> &map)
{
  logger->log ("  # %s: %li", title, (long)map.elements ());
  if (!show_objs)
    return;

  auto_vec<const T *> vec_objs (map.elements ());
  for (typename consolidation_map<T>::iterator iter = map.begin ();
       iter != map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("call string consolidation");
  m_empty_call_string.recursive_log (logger);
  logger->log ("next symbol id: %i", m_next_symbol_id);
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  logger->log ("region consolidation");
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
  logger->log ("  # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}

/* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
   (using -fdump-analyzer-untracked).  */

static void
dump_untracked_region (const decl_region *decl_reg)
{
  tree decl = decl_reg->get_decl ();
  if (TREE_CODE (decl) != VAR_DECL)
    return;
  /* For now, don't emit the status of decls in the constant pool, to avoid
     differences in DejaGnu test results between targets that use these vs
     those that don't.
     (Eventually these decls should probably be untracked and we should test
     for that, but that's not stage 4 material).  */
  if (DECL_IN_CONSTANT_POOL (decl))
    return;
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
	      "track %qD: %s",
	      decl, (decl_reg->tracked_p () ? "yes" : "no"));
}
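
/* Editor's note: an illustrative example of the output (not part of the
   original file).  For a tracked variable "v", the warning above prints
   something like:

     warning: track 'v': yes

   which DejaGnu tests can match with dg-warning when
   -fdump-analyzer-untracked is in effect.  */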

/* Implementation of -fdump-analyzer-untracked.  */

void
region_model_manager::dump_untracked_regions () const
{
  for (auto iter : m_globals_map)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
  for (auto frame_iter : m_frame_regions)
    {
      const frame_region *frame_reg = frame_iter.second;
      frame_reg->dump_untracked_regions ();
    }
}

void
frame_region::dump_untracked_regions () const
{
  for (auto iter : m_locals)
    {
      const decl_region *decl_reg = iter.second;
      dump_untracked_region (decl_reg);
    }
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */