1 /* Classes for modeling the state of memory.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_ANALYZER_REGION_MODEL_H
22 #define GCC_ANALYZER_REGION_MODEL_H
23
24 /* Implementation of the region-based ternary model described in:
25 "A Memory Model for Static Analysis of C Programs"
26 (Zhongxing Xu, Ted Kremenek, and Jian Zhang)
27 http://lcs.ios.ac.cn/~xuzb/canalyze/memmodel.pdf */
28
29 #include "analyzer/svalue.h"
30 #include "analyzer/region.h"
31
32 using namespace ana;
33
34 namespace inchash
35 {
36 extern void add_path_var (path_var pv, hash &hstate);
37 } // namespace inchash
38
39 namespace ana {
40
41 template <typename T>
42 class one_way_id_map
43 {
44 public:
45 one_way_id_map (int num_ids);
46 void put (T src, T dst);
47 T get_dst_for_src (T src) const;
48 void dump_to_pp (pretty_printer *pp) const;
49 void dump () const;
50 void update (T *) const;
51
52 private:
53 auto_vec<T> m_src_to_dst;
54 };
55
56 /* class one_way_id_map. */
57
58 /* one_way_id_map's ctor, which populates the map with dummy null values. */
59
60 template <typename T>
61 inline one_way_id_map<T>::one_way_id_map (int num_ids)
62 : m_src_to_dst (num_ids)
63 {
64 for (int i = 0; i < num_ids; i++)
65 m_src_to_dst.quick_push (T::null ());
66 }
67
68 /* Record that SRC is to be mapped to DST. */
69
70 template <typename T>
71 inline void
72 one_way_id_map<T>::put (T src, T dst)
73 {
74 m_src_to_dst[src.as_int ()] = dst;
75 }
76
77 /* Get the new value for SRC within the map. */
78
79 template <typename T>
80 inline T
81 one_way_id_map<T>::get_dst_for_src (T src) const
82 {
83 if (src.null_p ())
84 return src;
85 return m_src_to_dst[src.as_int ()];
86 }
87
88 /* Dump this map to PP. */
89
90 template <typename T>
91 inline void
92 one_way_id_map<T>::dump_to_pp (pretty_printer *pp) const
93 {
94 pp_string (pp, "src to dst: {");
95 unsigned i;
96 T *dst;
97 FOR_EACH_VEC_ELT (m_src_to_dst, i, dst)
98 {
99 if (i > 0)
100 pp_string (pp, ", ");
101 T src (T::from_int (i));
102 src.print (pp);
103 pp_string (pp, " -> ");
104 dst->print (pp);
105 }
106 pp_string (pp, "}");
107 pp_newline (pp);
108 }
109
110 /* Dump this map to stderr. */
111
112 template <typename T>
113 DEBUG_FUNCTION inline void
114 one_way_id_map<T>::dump () const
115 {
116 pretty_printer pp;
117 pp.buffer->stream = stderr;
118 dump_to_pp (&pp);
119 pp_flush (&pp);
120 }
121
122 /* Update *ID from the old value to its new value in this map. */
123
124 template <typename T>
125 inline void
126 one_way_id_map<T>::update (T *id) const
127 {
128 *id = get_dst_for_src (*id);
129 }
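
/* Illustrative sketch (editorial, not part of the original header): a
   minimal ID type and how a one_way_id_map over it can remap IDs after a
   purge.  "toy_id" is hypothetical; any ID class providing null (),
   null_p (), as_int (), from_int () and print () can be used as T.

     struct toy_id
     {
       static toy_id null () { return toy_id (-1); }
       static toy_id from_int (int i) { return toy_id (i); }
       bool null_p () const { return m_idx == -1; }
       int as_int () const { return m_idx; }
       void print (pretty_printer *pp) const { pp_printf (pp, "%i", m_idx); }
       explicit toy_id (int idx) : m_idx (idx) {}
       int m_idx;
     };

     one_way_id_map<toy_id> remap (3);    // ids 0..2 exist before the purge
     remap.put (toy_id::from_int (0), toy_id::from_int (0));
     remap.put (toy_id::from_int (2), toy_id::from_int (1));
     // id 1 was purged, so it stays mapped to the null id.

     toy_id id = toy_id::from_int (2);
     remap.update (&id);                  // id.as_int () is now 1
*/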
130
131 /* A mapping from region to svalue for use when tracking state. */
132
133 class region_to_value_map
134 {
135 public:
136 typedef hash_map<const region *, const svalue *> hash_map_t;
137 typedef hash_map_t::iterator iterator;
138
139 region_to_value_map () : m_hash_map () {}
140 region_to_value_map (const region_to_value_map &other)
141 : m_hash_map (other.m_hash_map) {}
142 region_to_value_map &operator= (const region_to_value_map &other);
143
144 bool operator== (const region_to_value_map &other) const;
145 bool operator!= (const region_to_value_map &other) const
146 {
147 return !(*this == other);
148 }
149
150 iterator begin () const { return m_hash_map.begin (); }
151 iterator end () const { return m_hash_map.end (); }
152
153 const svalue * const *get (const region *reg) const
154 {
155 return const_cast <hash_map_t &> (m_hash_map).get (reg);
156 }
157 void put (const region *reg, const svalue *sval)
158 {
159 m_hash_map.put (reg, sval);
160 }
161 void remove (const region *reg)
162 {
163 m_hash_map.remove (reg);
164 }
165
166 bool is_empty () const { return m_hash_map.is_empty (); }
167
168 void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
169 void dump (bool simple) const;
170
171 bool can_merge_with_p (const region_to_value_map &other,
172 region_to_value_map *out) const;
173
174 void purge_state_involving (const svalue *sval);
175
176 private:
177 hash_map_t m_hash_map;
178 };
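
/* Illustrative sketch (editorial): basic use of region_to_value_map,
   mirroring its use for dynamic extents further below.  "mgr" is assumed
   to be a region_model_manager * and "reg" a base region obtained from it;
   both names are placeholders.

     region_to_value_map extents;
     const svalue *size_in_bytes
       = mgr->get_or_create_int_cst (size_type_node, 64);
     extents.put (reg, size_in_bytes);
     if (const svalue * const *slot = extents.get (reg))
       {
         // *slot is the svalue recorded for REG (the 64-byte constant).
       }
     extents.remove (reg);
     gcc_assert (extents.is_empty ());
*/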
179
180 /* Various operations delete information from a region_model.
181
182 This struct tracks how many of each kind of entity were purged (e.g.
183 for selftests, and for debugging). */
184
185 struct purge_stats
186 {
187 purge_stats ()
188 : m_num_svalues (0),
189 m_num_regions (0),
190 m_num_equiv_classes (0),
191 m_num_constraints (0),
192 m_num_bounded_ranges_constraints (0),
193 m_num_client_items (0)
194 {}
195
196 int m_num_svalues;
197 int m_num_regions;
198 int m_num_equiv_classes;
199 int m_num_constraints;
200 int m_num_bounded_ranges_constraints;
201 int m_num_client_items;
202 };
203
204 /* A base class for visiting regions and svalues, with do-nothing
205 base implementations of the per-subclass vfuncs. */
206
207 class visitor
208 {
209 public:
210 virtual void visit_region_svalue (const region_svalue *) {}
211 virtual void visit_constant_svalue (const constant_svalue *) {}
212 virtual void visit_unknown_svalue (const unknown_svalue *) {}
213 virtual void visit_poisoned_svalue (const poisoned_svalue *) {}
214 virtual void visit_setjmp_svalue (const setjmp_svalue *) {}
215 virtual void visit_initial_svalue (const initial_svalue *) {}
216 virtual void visit_unaryop_svalue (const unaryop_svalue *) {}
217 virtual void visit_binop_svalue (const binop_svalue *) {}
218 virtual void visit_sub_svalue (const sub_svalue *) {}
219 virtual void visit_repeated_svalue (const repeated_svalue *) {}
220 virtual void visit_bits_within_svalue (const bits_within_svalue *) {}
221 virtual void visit_unmergeable_svalue (const unmergeable_svalue *) {}
222 virtual void visit_placeholder_svalue (const placeholder_svalue *) {}
223 virtual void visit_widening_svalue (const widening_svalue *) {}
224 virtual void visit_compound_svalue (const compound_svalue *) {}
225 virtual void visit_conjured_svalue (const conjured_svalue *) {}
226 virtual void visit_asm_output_svalue (const asm_output_svalue *) {}
227 virtual void visit_const_fn_result_svalue (const const_fn_result_svalue *) {}
228
229 virtual void visit_region (const region *) {}
230 };
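
/* Illustrative sketch (editorial): a visitor subclass that counts the
   constant svalues reachable from an svalue.  This assumes the accept
   vfuncs declared in analyzer/svalue.h and analyzer/region.h drive the
   walk; "sval" is a placeholder for some svalue of interest.

     class constant_counter : public visitor
     {
     public:
       void visit_constant_svalue (const constant_svalue *) override
       {
         m_count++;
       }
       int m_count = 0;
     };

     constant_counter v;
     sval->accept (&v);
     // v.m_count is now the number of constant svalues visited.
*/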
231
232 } // namespace ana
233
234 namespace ana {
235
236 /* A class responsible for owning and consolidating region and svalue
237 instances.
238 region and svalue instances are immutable as far as clients are
239 concerned, so they are provided as "const" ptrs. */
240
241 class region_model_manager
242 {
243 public:
244 region_model_manager (logger *logger = NULL);
245 ~region_model_manager ();
246
247 /* call_string consolidation. */
248 const call_string &get_empty_call_string () const
249 {
250 return m_empty_call_string;
251 }
252
253 /* svalue consolidation. */
254 const svalue *get_or_create_constant_svalue (tree cst_expr);
255 const svalue *get_or_create_int_cst (tree type, poly_int64);
256 const svalue *get_or_create_unknown_svalue (tree type);
257 const svalue *get_or_create_setjmp_svalue (const setjmp_record &r,
258 tree type);
259 const svalue *get_or_create_poisoned_svalue (enum poison_kind kind,
260 tree type);
261 const svalue *get_or_create_initial_value (const region *reg);
262 const svalue *get_ptr_svalue (tree ptr_type, const region *pointee);
263 const svalue *get_or_create_unaryop (tree type, enum tree_code op,
264 const svalue *arg);
265 const svalue *get_or_create_cast (tree type, const svalue *arg);
266 const svalue *get_or_create_binop (tree type,
267 enum tree_code op,
268 const svalue *arg0, const svalue *arg1);
269 const svalue *get_or_create_sub_svalue (tree type,
270 const svalue *parent_svalue,
271 const region *subregion);
272 const svalue *get_or_create_repeated_svalue (tree type,
273 const svalue *outer_size,
274 const svalue *inner_svalue);
275 const svalue *get_or_create_bits_within (tree type,
276 const bit_range &bits,
277 const svalue *inner_svalue);
278 const svalue *get_or_create_unmergeable (const svalue *arg);
279 const svalue *get_or_create_widening_svalue (tree type,
280 const program_point &point,
281 const svalue *base_svalue,
282 const svalue *iter_svalue);
283 const svalue *get_or_create_compound_svalue (tree type,
284 const binding_map &map);
285 const svalue *get_or_create_conjured_svalue (tree type, const gimple *stmt,
286 const region *id_reg,
287 const conjured_purge &p);
288 const svalue *
289 get_or_create_asm_output_svalue (tree type,
290 const gasm *asm_stmt,
291 unsigned output_idx,
292 const vec<const svalue *> &inputs);
293 const svalue *
294 get_or_create_const_fn_result_svalue (tree type,
295 tree fndecl,
296 const vec<const svalue *> &inputs);
297
298 const svalue *maybe_get_char_from_string_cst (tree string_cst,
299 tree byte_offset_cst);
300
301 /* Dynamically-allocated svalue instances.
302 The number of these within the analysis can grow arbitrarily.
303 They are still owned by the manager. */
304 const svalue *create_unique_svalue (tree type);
305
306 /* region consolidation. */
307 const stack_region * get_stack_region () const { return &m_stack_region; }
308 const heap_region *get_heap_region () const { return &m_heap_region; }
309 const code_region *get_code_region () const { return &m_code_region; }
310 const globals_region *get_globals_region () const
311 {
312 return &m_globals_region;
313 }
314 const function_region *get_region_for_fndecl (tree fndecl);
315 const label_region *get_region_for_label (tree label);
316 const decl_region *get_region_for_global (tree expr);
317 const region *get_field_region (const region *parent, tree field);
318 const region *get_element_region (const region *parent,
319 tree element_type,
320 const svalue *index);
321 const region *get_offset_region (const region *parent,
322 tree type,
323 const svalue *byte_offset);
324 const region *get_sized_region (const region *parent,
325 tree type,
326 const svalue *byte_size_sval);
327 const region *get_cast_region (const region *original_region,
328 tree type);
329 const frame_region *get_frame_region (const frame_region *calling_frame,
330 function *fun);
331 const region *get_symbolic_region (const svalue *sval);
332 const string_region *get_region_for_string (tree string_cst);
333 const region *get_bit_range (const region *parent, tree type,
334 const bit_range &bits);
335 const var_arg_region *get_var_arg_region (const frame_region *parent,
336 unsigned idx);
337
338 const region *get_unknown_symbolic_region (tree region_type);
339
340 const region *
341 get_region_for_unexpected_tree_code (region_model_context *ctxt,
342 tree t,
343 const dump_location_t &loc);
344
345 unsigned alloc_region_id () { return m_next_region_id++; }
346
347 store_manager *get_store_manager () { return &m_store_mgr; }
348 bounded_ranges_manager *get_range_manager () const { return m_range_mgr; }
349
350 /* Dynamically-allocated region instances.
351 The number of these within the analysis can grow arbitrarily.
352 They are still owned by the manager. */
353 const region *create_region_for_heap_alloc ();
354 const region *create_region_for_alloca (const frame_region *frame);
355
356 void log_stats (logger *logger, bool show_objs) const;
357
358 void begin_checking_feasibility (void) { m_checking_feasibility = true; }
359 void end_checking_feasibility (void) { m_checking_feasibility = false; }
360
361 logger *get_logger () const { return m_logger; }
362
363 void dump_untracked_regions () const;
364
365 private:
366 bool too_complex_p (const complexity &c) const;
367 bool reject_if_too_complex (svalue *sval);
368
369 const svalue *maybe_fold_unaryop (tree type, enum tree_code op,
370 const svalue *arg);
371 const svalue *maybe_fold_binop (tree type, enum tree_code op,
372 const svalue *arg0, const svalue *arg1);
373 const svalue *maybe_fold_sub_svalue (tree type,
374 const svalue *parent_svalue,
375 const region *subregion);
376 const svalue *maybe_fold_repeated_svalue (tree type,
377 const svalue *outer_size,
378 const svalue *inner_svalue);
379 const svalue *maybe_fold_bits_within_svalue (tree type,
380 const bit_range &bits,
381 const svalue *inner_svalue);
382 const svalue *maybe_undo_optimize_bit_field_compare (tree type,
383 const compound_svalue *compound_sval,
384 tree cst, const svalue *arg1);
385 const svalue *maybe_fold_asm_output_svalue (tree type,
386 const vec<const svalue *> &inputs);
387
388 logger *m_logger;
389
390 const call_string m_empty_call_string;
391
392 unsigned m_next_region_id;
393 root_region m_root_region;
394 stack_region m_stack_region;
395 heap_region m_heap_region;
396
397 /* svalue consolidation. */
398 typedef hash_map<tree, constant_svalue *> constants_map_t;
399 constants_map_t m_constants_map;
400
401 typedef hash_map<tree, unknown_svalue *> unknowns_map_t;
402 unknowns_map_t m_unknowns_map;
403 const unknown_svalue *m_unknown_NULL;
404
405 typedef hash_map<poisoned_svalue::key_t,
406 poisoned_svalue *> poisoned_values_map_t;
407 poisoned_values_map_t m_poisoned_values_map;
408
409 typedef hash_map<setjmp_svalue::key_t,
410 setjmp_svalue *> setjmp_values_map_t;
411 setjmp_values_map_t m_setjmp_values_map;
412
413 typedef hash_map<const region *, initial_svalue *> initial_values_map_t;
414 initial_values_map_t m_initial_values_map;
415
416 typedef hash_map<region_svalue::key_t, region_svalue *> pointer_values_map_t;
417 pointer_values_map_t m_pointer_values_map;
418
419 typedef hash_map<unaryop_svalue::key_t,
420 unaryop_svalue *> unaryop_values_map_t;
421 unaryop_values_map_t m_unaryop_values_map;
422
423 typedef hash_map<binop_svalue::key_t, binop_svalue *> binop_values_map_t;
424 binop_values_map_t m_binop_values_map;
425
426 typedef hash_map<sub_svalue::key_t, sub_svalue *> sub_values_map_t;
427 sub_values_map_t m_sub_values_map;
428
429 typedef hash_map<repeated_svalue::key_t,
430 repeated_svalue *> repeated_values_map_t;
431 repeated_values_map_t m_repeated_values_map;
432
433 typedef hash_map<bits_within_svalue::key_t,
434 bits_within_svalue *> bits_within_values_map_t;
435 bits_within_values_map_t m_bits_within_values_map;
436
437 typedef hash_map<const svalue *,
438 unmergeable_svalue *> unmergeable_values_map_t;
439 unmergeable_values_map_t m_unmergeable_values_map;
440
441 typedef hash_map<widening_svalue::key_t,
442 widening_svalue */*,
443 widening_svalue::key_t::hash_map_traits*/>
444 widening_values_map_t;
445 widening_values_map_t m_widening_values_map;
446
447 typedef hash_map<compound_svalue::key_t,
448 compound_svalue *> compound_values_map_t;
449 compound_values_map_t m_compound_values_map;
450
451 typedef hash_map<conjured_svalue::key_t,
452 conjured_svalue *> conjured_values_map_t;
453 conjured_values_map_t m_conjured_values_map;
454
455 typedef hash_map<asm_output_svalue::key_t,
456 asm_output_svalue *> asm_output_values_map_t;
457 asm_output_values_map_t m_asm_output_values_map;
458
459 typedef hash_map<const_fn_result_svalue::key_t,
460 const_fn_result_svalue *> const_fn_result_values_map_t;
461 const_fn_result_values_map_t m_const_fn_result_values_map;
462
463 bool m_checking_feasibility;
464
465 /* "Dynamically-allocated" svalue instances.
466 The number of these within the analysis can grow arbitrarily.
467 They are still owned by the manager. */
468 auto_delete_vec<svalue> m_managed_dynamic_svalues;
469
470 /* Maximum complexity of svalues that weren't rejected. */
471 complexity m_max_complexity;
472
473 /* region consolidation. */
474
475 code_region m_code_region;
476 typedef hash_map<tree, function_region *> fndecls_map_t;
477 typedef fndecls_map_t::iterator fndecls_iterator_t;
478 fndecls_map_t m_fndecls_map;
479
480 typedef hash_map<tree, label_region *> labels_map_t;
481 typedef labels_map_t::iterator labels_iterator_t;
482 labels_map_t m_labels_map;
483
484 globals_region m_globals_region;
485 typedef hash_map<tree, decl_region *> globals_map_t;
486 typedef globals_map_t::iterator globals_iterator_t;
487 globals_map_t m_globals_map;
488
489 consolidation_map<field_region> m_field_regions;
490 consolidation_map<element_region> m_element_regions;
491 consolidation_map<offset_region> m_offset_regions;
492 consolidation_map<sized_region> m_sized_regions;
493 consolidation_map<cast_region> m_cast_regions;
494 consolidation_map<frame_region> m_frame_regions;
495 consolidation_map<symbolic_region> m_symbolic_regions;
496
497 typedef hash_map<tree, string_region *> string_map_t;
498 string_map_t m_string_map;
499
500 consolidation_map<bit_range_region> m_bit_range_regions;
501 consolidation_map<var_arg_region> m_var_arg_regions;
502
503 store_manager m_store_mgr;
504
505 bounded_ranges_manager *m_range_mgr;
506
507 /* "Dynamically-allocated" region instances.
508 The number of these within the analysis can grow arbitrarily.
509 They are still owned by the manager. */
510 auto_delete_vec<region> m_managed_dynamic_regions;
511 };
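
/* Illustrative sketch (editorial): "consolidation" means that asking the
   manager for the same value twice yields the same instance, so svalues
   and regions can be compared by pointer.  A minimal example:

     region_model_manager mgr;
     tree cst = build_int_cst (integer_type_node, 42);
     const svalue *sval_a = mgr.get_or_create_constant_svalue (cst);
     const svalue *sval_b = mgr.get_or_create_constant_svalue (cst);
     gcc_assert (sval_a == sval_b);

     const svalue *one = mgr.get_or_create_int_cst (integer_type_node, 1);
     const svalue *sum
       = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR, sval_a, one);
     // The get_or_create_* functions may also simplify; e.g. a binop on
     // two constants can fold to a constant svalue.
*/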
512
513 struct append_regions_cb_data;
514
515 /* Helper class for handling calls to functions with known behavior.
516 Implemented in region-model-impl-calls.c. */
517
518 class call_details
519 {
520 public:
521 call_details (const gcall *call, region_model *model,
522 region_model_context *ctxt);
523
524 region_model_manager *get_manager () const;
525 region_model_context *get_ctxt () const { return m_ctxt; }
526 uncertainty_t *get_uncertainty () const;
527 tree get_lhs_type () const { return m_lhs_type; }
528 const region *get_lhs_region () const { return m_lhs_region; }
529
530 bool maybe_set_lhs (const svalue *result) const;
531
532 unsigned num_args () const;
533
534 const gcall *get_call_stmt () const { return m_call; }
535
536 tree get_arg_tree (unsigned idx) const;
537 tree get_arg_type (unsigned idx) const;
538 const svalue *get_arg_svalue (unsigned idx) const;
539 const char *get_arg_string_literal (unsigned idx) const;
540
541 tree get_fndecl_for_call () const;
542
543 void dump_to_pp (pretty_printer *pp, bool simple) const;
544 void dump (bool simple) const;
545
546 const svalue *get_or_create_conjured_svalue (const region *) const;
547
548 private:
549 const gcall *m_call;
550 region_model *m_model;
551 region_model_context *m_ctxt;
552 tree m_lhs_type;
553 const region *m_lhs_region;
554 };
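
/* Illustrative sketch (editorial): the typical shape of a handler for a
   call with known behavior, in the style of the impl_call_* member
   functions of region_model declared below.  "impl_call_frobnicate" and
   the function it models are hypothetical.

     void
     region_model::impl_call_frobnicate (const call_details &cd)
     {
       const svalue *arg0 = cd.get_arg_svalue (0);
       (void)arg0;  // a real handler would e.g. add constraints on ARG0

       if (cd.get_lhs_type ())
         {
           // Bind a fresh conjured value to the lhs, so that the result
           // is treated as unknown-but-consistent at this call.
           const svalue *result
             = cd.get_or_create_conjured_svalue (cd.get_lhs_region ());
           cd.maybe_set_lhs (result);
         }
     }
*/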
555
556 /* A region_model encapsulates a representation of the state of memory, with
557 a tree of regions, along with their associated values.
558 The representation is graph-like because values can be pointers to
559 regions.
560 It also stores:
561 - a constraint_manager, capturing relationships between the values, and
562 - dynamic extents, mapping dynamically-allocated regions to svalues (their
563 capacities). */
564
565 class region_model
566 {
567 public:
568 typedef region_to_value_map dynamic_extents_t;
569
570 region_model (region_model_manager *mgr);
571 region_model (const region_model &other);
572 ~region_model ();
573 region_model &operator= (const region_model &other);
574
575 bool operator== (const region_model &other) const;
576 bool operator!= (const region_model &other) const
577 {
578 return !(*this == other);
579 }
580
581 hashval_t hash () const;
582
583 void print (pretty_printer *pp) const;
584
585 void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
586 void dump (FILE *fp, bool simple, bool multiline) const;
587 void dump (bool simple) const;
588
589 void debug () const;
590
591 void validate () const;
592
593 void canonicalize ();
594 bool canonicalized_p () const;
595
596 void
597 on_stmt_pre (const gimple *stmt,
598 bool *out_terminate_path,
599 bool *out_unknown_side_effects,
600 region_model_context *ctxt);
601
602 void on_assignment (const gassign *stmt, region_model_context *ctxt);
603 const svalue *get_gassign_result (const gassign *assign,
604 region_model_context *ctxt);
605 void on_asm_stmt (const gasm *asm_stmt, region_model_context *ctxt);
606 bool on_call_pre (const gcall *stmt, region_model_context *ctxt,
607 bool *out_terminate_path);
608 void on_call_post (const gcall *stmt,
609 bool unknown_side_effects,
610 region_model_context *ctxt);
611
612 void purge_state_involving (const svalue *sval, region_model_context *ctxt);
613
614 /* Specific handling for on_call_pre. */
615 void impl_call_alloca (const call_details &cd);
616 void impl_call_analyzer_describe (const gcall *call,
617 region_model_context *ctxt);
618 void impl_call_analyzer_dump_capacity (const gcall *call,
619 region_model_context *ctxt);
620 void impl_call_analyzer_dump_escaped (const gcall *call);
621 void impl_call_analyzer_eval (const gcall *call,
622 region_model_context *ctxt);
623 void impl_call_builtin_expect (const call_details &cd);
624 void impl_call_calloc (const call_details &cd);
625 bool impl_call_error (const call_details &cd, unsigned min_args,
626 bool *out_terminate_path);
627 void impl_call_fgets (const call_details &cd);
628 void impl_call_fread (const call_details &cd);
629 void impl_call_free (const call_details &cd);
630 void impl_call_malloc (const call_details &cd);
631 void impl_call_memcpy (const call_details &cd);
632 void impl_call_memset (const call_details &cd);
633 void impl_call_realloc (const call_details &cd);
634 void impl_call_strchr (const call_details &cd);
635 void impl_call_strcpy (const call_details &cd);
636 void impl_call_strlen (const call_details &cd);
637 void impl_call_operator_new (const call_details &cd);
638 void impl_call_operator_delete (const call_details &cd);
639 void impl_deallocation_call (const call_details &cd);
640
641 /* Implemented in varargs.cc. */
642 void impl_call_va_start (const call_details &cd);
643 void impl_call_va_copy (const call_details &cd);
644 void impl_call_va_arg (const call_details &cd);
645 void impl_call_va_end (const call_details &cd);
646
647 void handle_unrecognized_call (const gcall *call,
648 region_model_context *ctxt);
649 void get_reachable_svalues (svalue_set *out,
650 const svalue *extra_sval,
651 const uncertainty_t *uncertainty);
652
653 void on_return (const greturn *stmt, region_model_context *ctxt);
654 void on_setjmp (const gcall *stmt, const exploded_node *enode,
655 region_model_context *ctxt);
656 void on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
657 int setjmp_stack_depth, region_model_context *ctxt);
658
659 void update_for_phis (const supernode *snode,
660 const cfg_superedge *last_cfg_superedge,
661 region_model_context *ctxt);
662
663 void handle_phi (const gphi *phi, tree lhs, tree rhs,
664 const region_model &old_state,
665 region_model_context *ctxt);
666
667 bool maybe_update_for_edge (const superedge &edge,
668 const gimple *last_stmt,
669 region_model_context *ctxt,
670 rejected_constraint **out);
671
672 void update_for_gcall (const gcall *call_stmt,
673 region_model_context *ctxt,
674 function *callee = NULL);
675
676 void update_for_return_gcall (const gcall *call_stmt,
677 region_model_context *ctxt);
678
679 const region *push_frame (function *fun, const vec<const svalue *> *arg_sids,
680 region_model_context *ctxt);
681 const frame_region *get_current_frame () const { return m_current_frame; }
682 function * get_current_function () const;
683 void pop_frame (tree result_lvalue,
684 const svalue **out_result,
685 region_model_context *ctxt);
686 int get_stack_depth () const;
687 const frame_region *get_frame_at_index (int index) const;
688
689 const region *get_lvalue (path_var pv, region_model_context *ctxt) const;
690 const region *get_lvalue (tree expr, region_model_context *ctxt) const;
691 const svalue *get_rvalue (path_var pv, region_model_context *ctxt) const;
692 const svalue *get_rvalue (tree expr, region_model_context *ctxt) const;
693
694 const region *deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
695 region_model_context *ctxt) const;
696
697 const svalue *get_rvalue_for_bits (tree type,
698 const region *reg,
699 const bit_range &bits,
700 region_model_context *ctxt) const;
701
702 void set_value (const region *lhs_reg, const svalue *rhs_sval,
703 region_model_context *ctxt);
704 void set_value (tree lhs, tree rhs, region_model_context *ctxt);
705 void clobber_region (const region *reg);
706 void purge_region (const region *reg);
707 void fill_region (const region *reg, const svalue *sval);
708 void zero_fill_region (const region *reg);
709 void mark_region_as_unknown (const region *reg, uncertainty_t *uncertainty);
710
711 tristate eval_condition (const svalue *lhs,
712 enum tree_code op,
713 const svalue *rhs) const;
714 tristate eval_condition_without_cm (const svalue *lhs,
715 enum tree_code op,
716 const svalue *rhs) const;
717 tristate compare_initial_and_pointer (const initial_svalue *init,
718 const region_svalue *ptr) const;
719 tristate eval_condition (tree lhs,
720 enum tree_code op,
721 tree rhs,
722 region_model_context *ctxt);
723 bool add_constraint (tree lhs, enum tree_code op, tree rhs,
724 region_model_context *ctxt);
725 bool add_constraint (tree lhs, enum tree_code op, tree rhs,
726 region_model_context *ctxt,
727 rejected_constraint **out);
728
729 const region *create_region_for_heap_alloc (const svalue *size_in_bytes,
730 region_model_context *ctxt);
731 const region *create_region_for_alloca (const svalue *size_in_bytes,
732 region_model_context *ctxt);
733
734 tree get_representative_tree (const svalue *sval) const;
735 path_var
736 get_representative_path_var (const svalue *sval,
737 svalue_set *visited) const;
738 path_var
739 get_representative_path_var (const region *reg,
740 svalue_set *visited) const;
741
742 /* For selftests. */
743 constraint_manager *get_constraints ()
744 {
745 return m_constraints;
746 }
747
748 store *get_store () { return &m_store; }
749 const store *get_store () const { return &m_store; }
750
751 const dynamic_extents_t &
752 get_dynamic_extents () const
753 {
754 return m_dynamic_extents;
755 }
756 const svalue *get_dynamic_extents (const region *reg) const;
757 void set_dynamic_extents (const region *reg,
758 const svalue *size_in_bytes,
759 region_model_context *ctxt);
760 void unset_dynamic_extents (const region *reg);
761
762 region_model_manager *get_manager () const { return m_mgr; }
763 bounded_ranges_manager *get_range_manager () const
764 {
765 return m_mgr->get_range_manager ();
766 }
767
768 void unbind_region_and_descendents (const region *reg,
769 enum poison_kind pkind);
770
771 bool can_merge_with_p (const region_model &other_model,
772 const program_point &point,
773 region_model *out_model,
774 const extrinsic_state *ext_state = NULL,
775 const program_state *state_a = NULL,
776 const program_state *state_b = NULL) const;
777
778 tree get_fndecl_for_call (const gcall *call,
779 region_model_context *ctxt);
780
781 void get_regions_for_current_frame (auto_vec<const decl_region *> *out) const;
782 static void append_regions_cb (const region *base_reg,
783 struct append_regions_cb_data *data);
784
785 const svalue *get_store_value (const region *reg,
786 region_model_context *ctxt) const;
787
788 bool region_exists_p (const region *reg) const;
789
790 void loop_replay_fixup (const region_model *dst_state);
791
792 const svalue *get_capacity (const region *reg) const;
793
794 /* Implemented in sm-malloc.cc */
795 void on_realloc_with_move (const call_details &cd,
796 const svalue *old_ptr_sval,
797 const svalue *new_ptr_sval);
798
799 private:
800 const region *get_lvalue_1 (path_var pv, region_model_context *ctxt) const;
801 const svalue *get_rvalue_1 (path_var pv, region_model_context *ctxt) const;
802
803 path_var
804 get_representative_path_var_1 (const svalue *sval,
805 svalue_set *visited) const;
806 path_var
807 get_representative_path_var_1 (const region *reg,
808 svalue_set *visited) const;
809
810 bool add_constraint (const svalue *lhs,
811 enum tree_code op,
812 const svalue *rhs,
813 region_model_context *ctxt);
814 bool add_constraints_from_binop (const svalue *outer_lhs,
815 enum tree_code outer_op,
816 const svalue *outer_rhs,
817 bool *out,
818 region_model_context *ctxt);
819
820 void update_for_call_superedge (const call_superedge &call_edge,
821 region_model_context *ctxt);
822 void update_for_return_superedge (const return_superedge &return_edge,
823 region_model_context *ctxt);
824 void update_for_call_summary (const callgraph_superedge &cg_sedge,
825 region_model_context *ctxt);
826 bool apply_constraints_for_gcond (const cfg_superedge &edge,
827 const gcond *cond_stmt,
828 region_model_context *ctxt,
829 rejected_constraint **out);
830 bool apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
831 const gswitch *switch_stmt,
832 region_model_context *ctxt,
833 rejected_constraint **out);
834 bool apply_constraints_for_exception (const gimple *last_stmt,
835 region_model_context *ctxt,
836 rejected_constraint **out);
837
838 int poison_any_pointers_to_descendents (const region *reg,
839 enum poison_kind pkind);
840
841 void on_top_level_param (tree param, region_model_context *ctxt);
842
843 bool called_from_main_p () const;
844 const svalue *get_initial_value_for_global (const region *reg) const;
845
846 const svalue *check_for_poison (const svalue *sval,
847 tree expr,
848 region_model_context *ctxt) const;
849 const region * get_region_for_poisoned_expr (tree expr) const;
850
851 void check_dynamic_size_for_taint (enum memory_space mem_space,
852 const svalue *size_in_bytes,
853 region_model_context *ctxt) const;
854
855 void check_region_for_taint (const region *reg,
856 enum access_direction dir,
857 region_model_context *ctxt) const;
858
859 void check_for_writable_region (const region* dest_reg,
860 region_model_context *ctxt) const;
861 void check_region_access (const region *reg,
862 enum access_direction dir,
863 region_model_context *ctxt) const;
864 void check_region_for_write (const region *dest_reg,
865 region_model_context *ctxt) const;
866 void check_region_for_read (const region *src_reg,
867 region_model_context *ctxt) const;
868
869 void check_call_args (const call_details &cd) const;
870 void check_external_function_for_access_attr (const gcall *call,
871 tree callee_fndecl,
872 region_model_context *ctxt) const;
873
874 /* Storing this here to avoid passing it around everywhere. */
875 region_model_manager *const m_mgr;
876
877 store m_store;
878
879 constraint_manager *m_constraints; // TODO: embed, rather than dynalloc?
880
881 const frame_region *m_current_frame;
882
883 /* Map from base region to size in bytes, for tracking the sizes of
884 dynamically-allocated regions.
885 This is part of the region_model rather than the region to allow for
886 memory regions to be resized (e.g. by realloc). */
887 dynamic_extents_t m_dynamic_extents;
888 };
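
/* Illustrative sketch (editorial): basic use of a region_model, along the
   lines of the selftests in region-model.cc.  "x" and "y" are assumed to
   be global VAR_DECL trees of integer type; a NULL context is passed, as
   the selftests do.

     region_model_manager mgr;
     region_model model (&mgr);

     tree int_17 = build_int_cst (integer_type_node, 17);
     model.set_value (model.get_lvalue (x, NULL),
                      model.get_rvalue (int_17, NULL),
                      NULL);
     model.add_constraint (y, GT_EXPR, int_17, NULL);

     // Queries now reflect the binding and the constraint, e.g.
     //   model.eval_condition (x, EQ_EXPR, int_17, NULL) is (true)
     //   model.eval_condition (y, LE_EXPR, int_17, NULL) is (false)
*/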
889
890 /* Some region_model activity could lead to warnings (e.g. attempts to use an
891 uninitialized value). This abstract base class encapsulates an interface
892 for the region model to use when emitting such warnings.
893
894 Having this as an abstract base class allows us to support the various
895 operations needed by program_state in the analyzer within region_model,
896 whilst keeping them somewhat modularized. */
897
898 class region_model_context
899 {
900 public:
901 /* Hook for clients to store pending diagnostics.
902 Return true if the diagnostic was stored, or false if it was deleted. */
903 virtual bool warn (pending_diagnostic *d) = 0;
904
905 /* Hook for clients to add a note to the most recently stored pending
906 diagnostic.  Takes ownership of the pending_note (or deletes it). */
907 virtual void add_note (pending_note *pn) = 0;
908
909 /* Hook for clients to be notified when an SVAL that was reachable
910 in a previous state is no longer live, so that clients can emit warnings
911 about leaks. */
912 virtual void on_svalue_leak (const svalue *sval) = 0;
913
914 /* Hook for clients to be notified when the set of explicitly live
915 svalues changes, so that they can purge state relating to dead
916 svalues. */
917 virtual void on_liveness_change (const svalue_set &live_svalues,
918 const region_model *model) = 0;
919
920 virtual logger *get_logger () = 0;
921
922 /* Hook for clients to be notified when the condition
923 "LHS OP RHS" is added to the region model.
924 This exists so that state machines can detect tests on edges,
925 and use them to trigger sm-state transitions (e.g. transitions due
926 to ptrs becoming known to be NULL or non-NULL, rather than just
927 "unchecked") */
928 virtual void on_condition (const svalue *lhs,
929 enum tree_code op,
930 const svalue *rhs) = 0;
931
932 /* Hooks for clients to be notified when an unknown change happens
933 to SVAL (in response to a call to an unknown function). */
934 virtual void on_unknown_change (const svalue *sval, bool is_mutable) = 0;
935
936 /* Hooks for clients to be notified when a phi node is handled,
937 where RHS is the pertinent argument. */
938 virtual void on_phi (const gphi *phi, tree rhs) = 0;
939
940 /* Hooks for clients to be notified when the region model doesn't
941 know how to handle the tree code of T at LOC. */
942 virtual void on_unexpected_tree_code (tree t,
943 const dump_location_t &loc) = 0;
944
945 /* Hook for clients to be notified when a function_decl escapes. */
946 virtual void on_escaped_function (tree fndecl) = 0;
947
948 virtual uncertainty_t *get_uncertainty () = 0;
949
950 /* Hook for clients to purge state involving SVAL. */
951 virtual void purge_state_involving (const svalue *sval) = 0;
952
953 /* Hook for clients to split state with a non-standard path.
954 Takes ownership of INFO. */
955 virtual void bifurcate (custom_edge_info *info) = 0;
956
957 /* Hook for clients to terminate the standard path. */
958 virtual void terminate_path () = 0;
959
960 virtual const extrinsic_state *get_ext_state () const = 0;
961
962 /* Hook for clients to access the "malloc" state machine in
963 any underlying program_state. */
964 virtual bool get_malloc_map (sm_state_map **out_smap,
965 const state_machine **out_sm,
966 unsigned *out_sm_idx) = 0;
967 /* Likewise for the "taint" state machine. */
968 virtual bool get_taint_map (sm_state_map **out_smap,
969 const state_machine **out_sm,
970 unsigned *out_sm_idx) = 0;
971
972 /* Get the current statement, if any. */
973 virtual const gimple *get_stmt () const = 0;
974 };
975
976 /* A "do nothing" subclass of region_model_context. */
977
978 class noop_region_model_context : public region_model_context
979 {
980 public:
981 bool warn (pending_diagnostic *) override { return false; }
982 void add_note (pending_note *pn) override;
983 void on_svalue_leak (const svalue *) override {}
984 void on_liveness_change (const svalue_set &,
985 const region_model *) override {}
986 logger *get_logger () override { return NULL; }
987 void on_condition (const svalue *lhs ATTRIBUTE_UNUSED,
988 enum tree_code op ATTRIBUTE_UNUSED,
989 const svalue *rhs ATTRIBUTE_UNUSED) override
990 {
991 }
992 void on_unknown_change (const svalue *sval ATTRIBUTE_UNUSED,
993 bool is_mutable ATTRIBUTE_UNUSED) override
994 {
995 }
996 void on_phi (const gphi *phi ATTRIBUTE_UNUSED,
997 tree rhs ATTRIBUTE_UNUSED) override
998 {
999 }
1000 void on_unexpected_tree_code (tree, const dump_location_t &) override {}
1001
1002 void on_escaped_function (tree) override {}
1003
1004 uncertainty_t *get_uncertainty () override { return NULL; }
1005
1006 void purge_state_involving (const svalue *sval ATTRIBUTE_UNUSED) override {}
1007
1008 void bifurcate (custom_edge_info *info) override;
1009 void terminate_path () override;
1010
1011 const extrinsic_state *get_ext_state () const override { return NULL; }
1012
1013 bool get_malloc_map (sm_state_map **,
1014 const state_machine **,
1015 unsigned *) override
1016 {
1017 return false;
1018 }
1019 bool get_taint_map (sm_state_map **,
1020 const state_machine **,
1021 unsigned *) override
1022 {
1023 return false;
1024 }
1025
1026 const gimple *get_stmt () const override { return NULL; }
1027 };
1028
1029 /* A subclass of region_model_context for determining whether an operation
1030 would fail, e.g. "can we generate a region for the lvalue of EXPR?". */
1031
1032 class tentative_region_model_context : public noop_region_model_context
1033 {
1034 public:
1035 tentative_region_model_context () : m_num_unexpected_codes (0) {}
1036
1037 void on_unexpected_tree_code (tree, const dump_location_t &)
1038 final override
1039 {
1040 m_num_unexpected_codes++;
1041 }
1042
1043 bool had_errors_p () const { return m_num_unexpected_codes > 0; }
1044
1045 private:
1046 int m_num_unexpected_codes;
1047 };
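
/* Illustrative sketch (editorial): probing whether an operation would
   succeed without committing to it.  "model" and "expr" are placeholders.

     tentative_region_model_context tentative_ctxt;
     const region *reg = model.get_lvalue (expr, &tentative_ctxt);
     if (tentative_ctxt.had_errors_p ())
       {
         // EXPR contains a tree code the region model doesn't handle;
         // fall back to more conservative handling rather than using REG.
       }
*/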
1048
1049 /* Subclass of region_model_context that wraps another context, allowing
1050 for extra code to be added to the various hooks. */
1051
1052 class region_model_context_decorator : public region_model_context
1053 {
1054 public:
1055 bool warn (pending_diagnostic *d) override
1056 {
1057 return m_inner->warn (d);
1058 }
1059
1060 void add_note (pending_note *pn) override
1061 {
1062 m_inner->add_note (pn);
1063 }
1064
1065 void on_svalue_leak (const svalue *sval) override
1066 {
1067 m_inner->on_svalue_leak (sval);
1068 }
1069
1070 void on_liveness_change (const svalue_set &live_svalues,
1071 const region_model *model) override
1072 {
1073 m_inner->on_liveness_change (live_svalues, model);
1074 }
1075
1076 logger *get_logger () override
1077 {
1078 return m_inner->get_logger ();
1079 }
1080
1081 void on_condition (const svalue *lhs,
1082 enum tree_code op,
1083 const svalue *rhs) override
1084 {
1085 m_inner->on_condition (lhs, op, rhs);
1086 }
1087
1088 void on_unknown_change (const svalue *sval, bool is_mutable) override
1089 {
1090 m_inner->on_unknown_change (sval, is_mutable);
1091 }
1092
1093 void on_phi (const gphi *phi, tree rhs) override
1094 {
1095 m_inner->on_phi (phi, rhs);
1096 }
1097
1098 void on_unexpected_tree_code (tree t,
1099 const dump_location_t &loc) override
1100 {
1101 m_inner->on_unexpected_tree_code (t, loc);
1102 }
1103
1104 void on_escaped_function (tree fndecl) override
1105 {
1106 m_inner->on_escaped_function (fndecl);
1107 }
1108
1109 uncertainty_t *get_uncertainty () override
1110 {
1111 return m_inner->get_uncertainty ();
1112 }
1113
1114 void purge_state_involving (const svalue *sval) override
1115 {
1116 m_inner->purge_state_involving (sval);
1117 }
1118
1119 void bifurcate (custom_edge_info *info) override
1120 {
1121 m_inner->bifurcate (info);
1122 }
1123
1124 void terminate_path () override
1125 {
1126 m_inner->terminate_path ();
1127 }
1128
1129 const extrinsic_state *get_ext_state () const override
1130 {
1131 return m_inner->get_ext_state ();
1132 }
1133
1134 bool get_malloc_map (sm_state_map **out_smap,
1135 const state_machine **out_sm,
1136 unsigned *out_sm_idx) override
1137 {
1138 return m_inner->get_malloc_map (out_smap, out_sm, out_sm_idx);
1139 }
1140
1141 bool get_taint_map (sm_state_map **out_smap,
1142 const state_machine **out_sm,
1143 unsigned *out_sm_idx) override
1144 {
1145 return m_inner->get_taint_map (out_smap, out_sm, out_sm_idx);
1146 }
1147
1148 const gimple *get_stmt () const override
1149 {
1150 return m_inner->get_stmt ();
1151 }
1152
1153 protected:
1154 region_model_context_decorator (region_model_context *inner)
1155 : m_inner (inner)
1156 {
1157 gcc_assert (m_inner);
1158 }
1159
1160 region_model_context *m_inner;
1161 };
1162
1163 /* Subclass of region_model_context_decorator that adds a note
1164 when saving diagnostics. */
1165
1166 class note_adding_context : public region_model_context_decorator
1167 {
1168 public:
1169 bool warn (pending_diagnostic *d) override
1170 {
1171 if (m_inner->warn (d))
1172 {
1173 add_note (make_note ());
1174 return true;
1175 }
1176 else
1177 return false;
1178 }
1179
1180 /* Hook to make the new note. */
1181 virtual pending_note *make_note () = 0;
1182
1183 protected:
1184 note_adding_context (region_model_context *inner)
1185 : region_model_context_decorator (inner)
1186 {
1187 }
1188 };
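
/* Illustrative sketch (editorial): a hypothetical note_adding_context
   subclass that attaches an extra note to any diagnostic saved while it
   wraps the real context.  "my_note" is assumed to be a pending_note
   subclass defined elsewhere; "inner_ctxt", "reg" and "sval" are
   placeholders.

     class annotating_context : public note_adding_context
     {
     public:
       annotating_context (region_model_context *inner)
       : note_adding_context (inner)
       {
       }
       pending_note *make_note () final override
       {
         return new my_note ();
       }
     };

     annotating_context ctxt (inner_ctxt);
     model.set_value (reg, sval, &ctxt);
     // If set_value saves a diagnostic via CTXT, a my_note instance is
     // automatically attached to it.
*/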
1189
1190 /* A bundle of data for use when attempting to merge two region_model
1191 instances to make a third. */
1192
1193 struct model_merger
1194 {
1195 model_merger (const region_model *model_a,
1196 const region_model *model_b,
1197 const program_point &point,
1198 region_model *merged_model,
1199 const extrinsic_state *ext_state,
1200 const program_state *state_a,
1201 const program_state *state_b)
1202 : m_model_a (model_a), m_model_b (model_b),
1203 m_point (point),
1204 m_merged_model (merged_model),
1205 m_ext_state (ext_state),
1206 m_state_a (state_a), m_state_b (state_b)
1207 {
1208 }
1209
1210 void dump_to_pp (pretty_printer *pp, bool simple) const;
1211 void dump (FILE *fp, bool simple) const;
1212 void dump (bool simple) const;
1213
1214 region_model_manager *get_manager () const
1215 {
1216 return m_model_a->get_manager ();
1217 }
1218
1219 bool mergeable_svalue_p (const svalue *) const;
1220
1221 const region_model *m_model_a;
1222 const region_model *m_model_b;
1223 const program_point &m_point;
1224 region_model *m_merged_model;
1225
1226 const extrinsic_state *m_ext_state;
1227 const program_state *m_state_a;
1228 const program_state *m_state_b;
1229 };
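
/* Illustrative sketch (editorial): merging is normally driven via
   region_model::can_merge_with_p, which constructs a model_merger
   internally.  "point" is assumed to be the program_point at which the
   merge is happening.

     region_model merged (&mgr);
     if (model_a.can_merge_with_p (model_b, point, &merged))
       {
         // MERGED now over-approximates both MODEL_A and MODEL_B.
       }
*/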
1230
1231 /* A record that can (optionally) be written out when
1232 region_model::add_constraint fails. */
1233
1234 class rejected_constraint
1235 {
1236 public:
1237 virtual ~rejected_constraint () {}
1238 virtual void dump_to_pp (pretty_printer *pp) const = 0;
1239
1240 const region_model &get_model () const { return m_model; }
1241
1242 protected:
1243 rejected_constraint (const region_model &model)
1244 : m_model (model)
1245 {}
1246
1247 region_model m_model;
1248 };
1249
1250 class rejected_op_constraint : public rejected_constraint
1251 {
1252 public:
1253 rejected_op_constraint (const region_model &model,
1254 tree lhs, enum tree_code op, tree rhs)
1255 : rejected_constraint (model),
1256 m_lhs (lhs), m_op (op), m_rhs (rhs)
1257 {}
1258
1259 void dump_to_pp (pretty_printer *pp) const final override;
1260
1261 tree m_lhs;
1262 enum tree_code m_op;
1263 tree m_rhs;
1264 };
1265
1266 class rejected_ranges_constraint : public rejected_constraint
1267 {
1268 public:
1269 rejected_ranges_constraint (const region_model &model,
1270 tree expr, const bounded_ranges *ranges)
1271 : rejected_constraint (model),
1272 m_expr (expr), m_ranges (ranges)
1273 {}
1274
1275 void dump_to_pp (pretty_printer *pp) const final override;
1276
1277 private:
1278 tree m_expr;
1279 const bounded_ranges *m_ranges;
1280 };
1281
1282 /* A bundle of state. */
1283
1284 class engine
1285 {
1286 public:
1287 engine (const supergraph *sg = NULL, logger *logger = NULL);
1288 const supergraph *get_supergraph () { return m_sg; }
1289 region_model_manager *get_model_manager () { return &m_mgr; }
1290
1291 void log_stats (logger *logger) const;
1292
1293 private:
1294 const supergraph *m_sg;
1295 region_model_manager m_mgr;
1296 };
1297
1298 } // namespace ana
1299
1300 extern void debug (const region_model &rmodel);
1301
1302 namespace ana {
1303
1304 #if CHECKING_P
1305
1306 namespace selftest {
1307
1308 using namespace ::selftest;
1309
1310 /* An implementation of region_model_context for use in selftests, which
1311 stores any pending_diagnostic instances passed to it. */
1312
1313 class test_region_model_context : public noop_region_model_context
1314 {
1315 public:
1316 bool warn (pending_diagnostic *d) final override
1317 {
1318 m_diagnostics.safe_push (d);
1319 return true;
1320 }
1321
1322 unsigned get_num_diagnostics () const { return m_diagnostics.length (); }
1323
1324 void on_unexpected_tree_code (tree t, const dump_location_t &)
1325 final override
1326 {
1327 internal_error ("unhandled tree code: %qs",
1328 get_tree_code_name (TREE_CODE (t)));
1329 }
1330
1331 private:
1332 /* Implicitly delete any diagnostics in the dtor. */
1333 auto_delete_vec<pending_diagnostic> m_diagnostics;
1334 };
1335
1336 /* Attempt to add the constraint (LHS OP RHS) to MODEL.
1337 Verify that MODEL remains satisfiable. */
1338
1339 #define ADD_SAT_CONSTRAINT(MODEL, LHS, OP, RHS) \
1340 SELFTEST_BEGIN_STMT \
1341 bool sat = (MODEL).add_constraint (LHS, OP, RHS, NULL); \
1342 ASSERT_TRUE (sat); \
1343 SELFTEST_END_STMT
1344
1345 /* Attempt to add the constraint (LHS OP RHS) to MODEL.
1346 Verify that the result is not satisfiable. */
1347
1348 #define ADD_UNSAT_CONSTRAINT(MODEL, LHS, OP, RHS) \
1349 SELFTEST_BEGIN_STMT \
1350 bool sat = (MODEL).add_constraint (LHS, OP, RHS, NULL); \
1351 ASSERT_FALSE (sat); \
1352 SELFTEST_END_STMT
1353
1354 /* Implementation detail of the ASSERT_CONDITION_* macros. */
1355
1356 void assert_condition (const location &loc,
1357 region_model &model,
1358 const svalue *lhs, tree_code op, const svalue *rhs,
1359 tristate expected);
1360
1361 void assert_condition (const location &loc,
1362 region_model &model,
1363 tree lhs, tree_code op, tree rhs,
1364 tristate expected);
1365
1366 /* Assert that REGION_MODEL evaluates the condition "LHS OP RHS"
1367 as "true". */
1368
1369 #define ASSERT_CONDITION_TRUE(REGION_MODEL, LHS, OP, RHS) \
1370 SELFTEST_BEGIN_STMT \
1371 assert_condition (SELFTEST_LOCATION, REGION_MODEL, LHS, OP, RHS, \
1372 tristate (tristate::TS_TRUE)); \
1373 SELFTEST_END_STMT
1374
1375 /* Assert that REGION_MODEL evaluates the condition "LHS OP RHS"
1376 as "false". */
1377
1378 #define ASSERT_CONDITION_FALSE(REGION_MODEL, LHS, OP, RHS) \
1379 SELFTEST_BEGIN_STMT \
1380 assert_condition (SELFTEST_LOCATION, REGION_MODEL, LHS, OP, RHS, \
1381 tristate (tristate::TS_FALSE)); \
1382 SELFTEST_END_STMT
1383
1384 /* Assert that REGION_MODEL evaluates the condition "LHS OP RHS"
1385 as "unknown". */
1386
1387 #define ASSERT_CONDITION_UNKNOWN(REGION_MODEL, LHS, OP, RHS) \
1388 SELFTEST_BEGIN_STMT \
1389 assert_condition (SELFTEST_LOCATION, REGION_MODEL, LHS, OP, RHS, \
1390 tristate (tristate::TS_UNKNOWN)); \
1391 SELFTEST_END_STMT
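
/* Illustrative sketch (editorial): typical use of the macros above inside
   a selftest, in the style of the test_* functions in region-model.cc.
   "x" is assumed to be a global VAR_DECL tree of integer type.

     static void
     test_constraints_on_x ()
     {
       region_model_manager mgr;
       region_model model (&mgr);
       tree int_5 = build_int_cst (integer_type_node, 5);

       ADD_SAT_CONSTRAINT (model, x, GE_EXPR, int_5);
       ASSERT_CONDITION_TRUE (model, x, GE_EXPR, int_5);
       ASSERT_CONDITION_UNKNOWN (model, x, EQ_EXPR, int_5);
       ADD_UNSAT_CONSTRAINT (model, x, LT_EXPR, int_5);
     }
*/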
1392
1393 } /* end of namespace selftest. */
1394
1395 #endif /* #if CHECKING_P */
1396
1397 } // namespace ana
1398
1399 #endif /* GCC_ANALYZER_REGION_MODEL_H */