]>
Commit | Line | Data |
---|---|---|
808f4dfe DM |
1 | /* Regions of memory. |
2 | Copyright (C) 2019-2020 Free Software Foundation, Inc. | |
3 | Contributed by David Malcolm <dmalcolm@redhat.com>. | |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it | |
8 | under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 3, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but | |
13 | WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
15 | General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tree.h" | |
25 | #include "diagnostic-core.h" | |
26 | #include "gimple-pretty-print.h" | |
27 | #include "function.h" | |
28 | #include "basic-block.h" | |
29 | #include "gimple.h" | |
30 | #include "gimple-iterator.h" | |
31 | #include "diagnostic-core.h" | |
32 | #include "graphviz.h" | |
33 | #include "options.h" | |
34 | #include "cgraph.h" | |
35 | #include "tree-dfa.h" | |
36 | #include "stringpool.h" | |
37 | #include "convert.h" | |
38 | #include "target.h" | |
39 | #include "fold-const.h" | |
40 | #include "tree-pretty-print.h" | |
41 | #include "diagnostic-color.h" | |
42 | #include "diagnostic-metadata.h" | |
43 | #include "tristate.h" | |
44 | #include "bitmap.h" | |
45 | #include "selftest.h" | |
46 | #include "function.h" | |
47 | #include "analyzer/analyzer.h" | |
48 | #include "analyzer/analyzer-logging.h" | |
49 | #include "ordered-hash-map.h" | |
50 | #include "options.h" | |
51 | #include "cgraph.h" | |
52 | #include "cfg.h" | |
53 | #include "digraph.h" | |
54 | #include "analyzer/supergraph.h" | |
55 | #include "sbitmap.h" | |
56 | #include "analyzer/call-string.h" | |
57 | #include "analyzer/program-point.h" | |
58 | #include "analyzer/store.h" | |
59 | #include "analyzer/region-model.h" | |
60 | ||
61 | #if ENABLE_ANALYZER | |
62 | ||
63 | namespace ana { | |
64 | ||
65 | /* class region and its various subclasses. */ | |
66 | ||
67 | /* class region. */ | |
68 | ||
region::~region ()
{
  /* Free the lazily-computed offset cache (see region::get_offset);
     it is heap-allocated on first use and owned by this region.  */
  delete m_cached_offset;
}
73 | ||
74 | /* Compare REG1 and REG2 by id. */ | |
75 | ||
76 | int | |
77 | region::cmp_ids (const region *reg1, const region *reg2) | |
78 | { | |
79 | return (long)reg1->get_id () - (long)reg2->get_id (); | |
80 | } | |
81 | ||
82 | /* Determine the base region for this region: when considering bindings | |
83 | for this region, the base region is the ancestor which identifies | |
84 | which cluster they should be partitioned into. | |
85 | Regions within the same struct/union/array are in the same cluster. | |
86 | Different decls are in different clusters. */ | |
87 | ||
88 | const region * | |
89 | region::get_base_region () const | |
90 | { | |
91 | const region *iter = this; | |
92 | while (iter) | |
93 | { | |
94 | switch (iter->get_kind ()) | |
95 | { | |
96 | case RK_FIELD: | |
97 | case RK_ELEMENT: | |
98 | case RK_OFFSET: | |
99 | iter = iter->get_parent_region (); | |
100 | continue; | |
101 | case RK_CAST: | |
102 | iter = iter->dyn_cast_cast_region ()->get_original_region (); | |
103 | continue; | |
104 | default: | |
105 | return iter; | |
106 | } | |
107 | } | |
108 | return iter; | |
109 | } | |
110 | ||
111 | /* Return true if get_base_region() == this for this region. */ | |
112 | ||
113 | bool | |
114 | region::base_region_p () const | |
115 | { | |
116 | switch (get_kind ()) | |
117 | { | |
118 | /* Region kinds representing a descendent of a base region. */ | |
119 | case RK_FIELD: | |
120 | case RK_ELEMENT: | |
121 | case RK_OFFSET: | |
122 | case RK_CAST: | |
123 | return false; | |
124 | ||
125 | default: | |
126 | return true; | |
127 | } | |
128 | } | |
129 | ||
130 | /* Return true if this region is ELDER or one of its descendents. */ | |
131 | ||
132 | bool | |
133 | region::descendent_of_p (const region *elder) const | |
134 | { | |
135 | const region *iter = this; | |
136 | while (iter) | |
137 | { | |
138 | if (iter == elder) | |
139 | return true; | |
140 | if (iter->get_kind () == RK_CAST) | |
141 | iter = iter->dyn_cast_cast_region ()->get_original_region (); | |
142 | else | |
143 | iter = iter->get_parent_region (); | |
144 | } | |
145 | return false; | |
146 | } | |
147 | ||
148 | /* If this region is a frame_region, or a descendent of one, return it. | |
149 | Otherwise return NULL. */ | |
150 | ||
151 | const frame_region * | |
152 | region::maybe_get_frame_region () const | |
153 | { | |
154 | const region *iter = this; | |
155 | while (iter) | |
156 | { | |
157 | if (const frame_region *frame_reg = iter->dyn_cast_frame_region ()) | |
158 | return frame_reg; | |
159 | if (iter->get_kind () == RK_CAST) | |
160 | iter = iter->dyn_cast_cast_region ()->get_original_region (); | |
161 | else | |
162 | iter = iter->get_parent_region (); | |
163 | } | |
164 | return NULL; | |
165 | } | |
166 | ||
167 | /* If this region is a decl_region, return the decl. | |
168 | Otherwise return NULL. */ | |
169 | ||
170 | tree | |
171 | region::maybe_get_decl () const | |
172 | { | |
173 | if (const decl_region *decl_reg = dyn_cast_decl_region ()) | |
174 | return decl_reg->get_decl (); | |
175 | return NULL_TREE; | |
176 | } | |
177 | ||
178 | /* Get the region_offset for this region (calculating it on the | |
179 | first call and caching it internally). */ | |
180 | ||
181 | region_offset | |
182 | region::get_offset () const | |
183 | { | |
184 | if(!m_cached_offset) | |
185 | m_cached_offset = new region_offset (calc_offset ()); | |
186 | return *m_cached_offset; | |
187 | } | |
188 | ||
189 | /* If the size of this region (in bytes) is known statically, write it to *OUT | |
190 | and return true. | |
191 | Otherwise return false. */ | |
192 | ||
193 | bool | |
194 | region::get_byte_size (byte_size_t *out) const | |
195 | { | |
196 | tree type = get_type (); | |
197 | ||
198 | /* Bail out e.g. for heap-allocated regions. */ | |
199 | if (!type) | |
200 | return false; | |
201 | ||
202 | HOST_WIDE_INT bytes = int_size_in_bytes (type); | |
203 | if (bytes == -1) | |
204 | return false; | |
205 | *out = bytes; | |
206 | return true; | |
207 | } | |
208 | ||
209 | /* If the size of this region (in bits) is known statically, write it to *OUT | |
210 | and return true. | |
211 | Otherwise return false. */ | |
212 | ||
213 | bool | |
214 | region::get_bit_size (bit_size_t *out) const | |
215 | { | |
216 | byte_size_t byte_size; | |
217 | if (!get_byte_size (&byte_size)) | |
218 | return false; | |
219 | *out = byte_size * BITS_PER_UNIT; | |
220 | return true; | |
221 | } | |
222 | ||
223 | /* Get the field within RECORD_TYPE at BIT_OFFSET. */ | |
224 | ||
225 | static tree | |
226 | get_field_at_bit_offset (tree record_type, bit_offset_t bit_offset) | |
227 | { | |
228 | gcc_assert (TREE_CODE (record_type) == RECORD_TYPE); | |
400abebf DM |
229 | if (bit_offset < 0) |
230 | return NULL; | |
808f4dfe DM |
231 | |
232 | /* Find the first field that has an offset > BIT_OFFSET, | |
233 | then return the one preceding it. | |
234 | Skip other trees within the chain, such as FUNCTION_DECLs. */ | |
235 | tree last_field = NULL_TREE; | |
236 | for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE; | |
237 | iter = DECL_CHAIN (iter)) | |
238 | { | |
239 | if (TREE_CODE (iter) == FIELD_DECL) | |
240 | { | |
241 | int iter_field_offset = int_bit_position (iter); | |
242 | if (bit_offset < iter_field_offset) | |
243 | return last_field; | |
244 | last_field = iter; | |
245 | } | |
246 | } | |
247 | return last_field; | |
248 | } | |
249 | ||
/* Populate *OUT with descendent regions of type TYPE that match
   RELATIVE_BIT_OFFSET and SIZE_IN_BITS within this region.

   Recursively descends through arrays, structs and unions, adjusting
   the offset at each level; regions that match exactly are pushed
   onto *OUT.  Bails out silently if either type is unknown.  */

void
region::get_subregions_for_binding (region_model_manager *mgr,
				    bit_offset_t relative_bit_offset,
				    bit_size_t size_in_bits,
				    tree type,
				    auto_vec <const region *> *out) const
{
  /* Without type information on both sides we can't match anything.  */
  if (get_type () == NULL_TREE || type == NULL_TREE)
    return;
  /* Base case: this region itself is a match.  */
  if (relative_bit_offset == 0
      && types_compatible_p (get_type (), type))
    {
      out->safe_push (this);
      return;
    }
  switch (TREE_CODE (get_type ()))
    {
    case ARRAY_TYPE:
      {
	tree element_type = TREE_TYPE (get_type ());
	/* int_size_in_bytes rejects incomplete/variable-sized element
	   types by returning -1 (or 0), in which case we give up.  */
	HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (element_type);
	if (hwi_byte_size > 0)
	  {
	    HOST_WIDE_INT bits_per_element
	      = hwi_byte_size << LOG2_BITS_PER_UNIT;
	    /* Split the offset into (element index, offset within
	       element) and recurse into that element.  */
	    HOST_WIDE_INT element_index
	      = (relative_bit_offset.to_shwi () / bits_per_element);
	    tree element_index_cst
	      = build_int_cst (integer_type_node, element_index);
	    HOST_WIDE_INT inner_bit_offset
	      = relative_bit_offset.to_shwi () % bits_per_element;
	    const region *subregion = mgr->get_element_region
	      (this, element_type,
	       mgr->get_or_create_constant_svalue (element_index_cst));
	    subregion->get_subregions_for_binding (mgr, inner_bit_offset,
						   size_in_bits, type, out);
	  }
      }
      break;
    case RECORD_TYPE:
      {
	/* The bit offset might be *within* one of the fields (such as
	   with nested structs).
	   So we want to find the enclosing field, adjust the offset,
	   and repeat.  */
	if (tree field = get_field_at_bit_offset (get_type (),
						  relative_bit_offset))
	  {
	    int field_bit_offset = int_bit_position (field);
	    const region *subregion = mgr->get_field_region (this, field);
	    subregion->get_subregions_for_binding
	      (mgr, relative_bit_offset - field_bit_offset,
	       size_in_bits, type, out);
	  }
      }
      break;
    case UNION_TYPE:
      {
	/* All fields of a union start at offset 0, so try every
	   FIELD_DECL with the unchanged offset.  */
	for (tree field = TYPE_FIELDS (get_type ()); field != NULL_TREE;
	     field = DECL_CHAIN (field))
	  {
	    if (TREE_CODE (field) != FIELD_DECL)
	      continue;
	    const region *subregion = mgr->get_field_region (this, field);
	    subregion->get_subregions_for_binding (mgr,
						   relative_bit_offset,
						   size_in_bits,
						   type,
						   out);
	  }
      }
      break;
    default:
      /* Do nothing.  */
      break;
    }
}
330 | ||
/* Walk from this region up to the base region within its cluster, calculating
   the offset relative to the base region, either as an offset in bits,
   or a symbolic offset.

   The offset becomes symbolic as soon as any step on the walk has a
   non-constant position (variable-placed field, non-constant array
   index, or non-constant byte offset).  */

region_offset
region::calc_offset () const
{
  const region *iter_region = this;
  bit_offset_t accum_bit_offset = 0;

  while (iter_region)
    {
      switch (iter_region->get_kind ())
	{
	case RK_FIELD:
	  {
	    const field_region *field_reg
	      = (const field_region *)iter_region;
	    iter_region = iter_region->get_parent_region ();

	    /* Compare with e.g. gimple-fold.c's
	       fold_nonarray_ctor_reference.  */
	    tree field = field_reg->get_field ();
	    tree byte_offset = DECL_FIELD_OFFSET (field);
	    /* A variable-positioned field makes the whole offset
	       symbolic.  */
	    if (TREE_CODE (byte_offset) != INTEGER_CST)
	      return region_offset::make_symbolic (iter_region);
	    tree field_offset = DECL_FIELD_BIT_OFFSET (field);
	    /* Compute bit offset of the field.  */
	    offset_int bitoffset
	      = (wi::to_offset (field_offset)
		 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
	    accum_bit_offset += bitoffset;
	  }
	  continue;

	case RK_ELEMENT:
	  {
	    const element_region *element_reg
	      = (const element_region *)iter_region;
	    iter_region = iter_region->get_parent_region ();

	    if (tree idx_cst
		= element_reg->get_index ()->maybe_get_constant ())
	      {
		gcc_assert (TREE_CODE (idx_cst) == INTEGER_CST);

		tree elem_type = element_reg->get_type ();
		offset_int element_idx = wi::to_offset (idx_cst);

		/* First, use int_size_in_bytes, to reject the case where we
		   have an incomplete type, or a non-constant value.  */
		HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
		if (hwi_byte_size > 0)
		  {
		    offset_int element_bit_size
		      = hwi_byte_size << LOG2_BITS_PER_UNIT;
		    offset_int element_bit_offset
		      = element_idx * element_bit_size;
		    accum_bit_offset += element_bit_offset;
		    continue;
		  }
	      }
	    /* Non-constant index, or unknown element size: the offset
	       is symbolic.  */
	    return region_offset::make_symbolic (iter_region);
	  }
	  continue;

	case RK_OFFSET:
	  {
	    const offset_region *offset_reg
	      = (const offset_region *)iter_region;
	    iter_region = iter_region->get_parent_region ();

	    if (tree byte_offset_cst
		= offset_reg->get_byte_offset ()->maybe_get_constant ())
	      {
		gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
		/* Use a signed value for the byte offset, to handle
		   negative offsets.  */
		HOST_WIDE_INT byte_offset
		  = wi::to_offset (byte_offset_cst).to_shwi ();
		HOST_WIDE_INT bit_offset = byte_offset * BITS_PER_UNIT;
		accum_bit_offset += bit_offset;
	      }
	    else
	      return region_offset::make_symbolic (iter_region);
	  }
	  continue;

	case RK_CAST:
	  {
	    /* A cast contributes no offset; skip to the region it
	       views.  */
	    const cast_region *cast_reg
	      = as_a <const cast_region *> (iter_region);
	    iter_region = cast_reg->get_original_region ();
	  }
	  continue;

	default:
	  /* Reached the base region of the cluster: the accumulated
	     offset is concrete.  */
	  return region_offset::make_concrete (iter_region, accum_bit_offset);
	}
    }
  return region_offset::make_concrete (iter_region, accum_bit_offset);
}
433 | ||
434 | /* Copy from SRC_REG to DST_REG, using CTXT for any issues that occur. */ | |
435 | ||
436 | void | |
437 | region_model::copy_region (const region *dst_reg, const region *src_reg, | |
438 | region_model_context *ctxt) | |
439 | { | |
440 | gcc_assert (dst_reg); | |
441 | gcc_assert (src_reg); | |
442 | if (dst_reg == src_reg) | |
443 | return; | |
444 | ||
445 | const svalue *sval = get_store_value (src_reg); | |
446 | set_value (dst_reg, sval, ctxt); | |
447 | } | |
448 | ||
449 | /* Dump a description of this region to stderr. */ | |
450 | ||
451 | DEBUG_FUNCTION void | |
452 | region::dump (bool simple) const | |
453 | { | |
454 | pretty_printer pp; | |
455 | pp_format_decoder (&pp) = default_tree_printer; | |
456 | pp_show_color (&pp) = pp_show_color (global_dc->printer); | |
457 | pp.buffer->stream = stderr; | |
458 | dump_to_pp (&pp, simple); | |
459 | pp_newline (&pp); | |
460 | pp_flush (&pp); | |
461 | } | |
462 | ||
/* Generate a description of this region, as a freshly-allocated string
   owned by the returned label_text.  */

DEBUG_FUNCTION label_text
region::get_desc (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_to_pp (&pp, simple);
  /* label_text::take assumes ownership of the xstrdup'd buffer.  */
  return label_text::take (xstrdup (pp_formatted_text (&pp)));
}
473 | ||
/* Base implementation of region::accept vfunc.
   Subclass implementations should chain up to this.

   Visits this region, then recursively visits its ancestors up to
   the root.  */

void
region::accept (visitor *v) const
{
  v->visit_region (this);
  if (m_parent)
    m_parent->accept (v);
}
484 | ||
485 | /* Return true if this is a symbolic region for deferencing an | |
486 | unknown ptr. | |
487 | We shouldn't attempt to bind values for this region (but | |
488 | can unbind values for other regions). */ | |
489 | ||
490 | bool | |
491 | region::symbolic_for_unknown_ptr_p () const | |
492 | { | |
493 | if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ()) | |
494 | if (sym_reg->get_pointer ()->get_kind () == SK_UNKNOWN) | |
495 | return true; | |
496 | return false; | |
497 | } | |
498 | ||
/* region's ctor.

   C is the complexity measure for this region, ID a unique id for
   deterministic ordering, PARENT the enclosing region (NULL for the
   root), and TYPE the tree type (NULL_TREE if untyped).  */

region::region (complexity c, unsigned id, const region *parent, tree type)
: m_complexity (c), m_id (id), m_parent (parent), m_type (type),
  m_cached_offset (NULL)
{
  /* TYPE, if supplied, must be an actual type node.  */
  gcc_assert (type == NULL_TREE || TYPE_P (type));
}
507 | ||
508 | /* Comparator for regions, using their IDs to order them. */ | |
509 | ||
510 | int | |
511 | region::cmp_ptrs (const void *p1, const void *p2) | |
512 | { | |
513 | const region * const *reg1 = (const region * const *)p1; | |
514 | const region * const *reg2 = (const region * const *)p2; | |
515 | ||
516 | return cmp_ids (*reg1, *reg2); | |
517 | } | |
518 | ||
519 | /* Determine if a pointer to this region must be non-NULL. | |
520 | ||
521 | Generally, pointers to regions must be non-NULL, but pointers | |
522 | to symbolic_regions might, in fact, be NULL. | |
523 | ||
524 | This allows us to simulate functions like malloc and calloc with: | |
525 | - only one "outcome" from each statement, | |
526 | - the idea that the pointer is on the heap if non-NULL | |
527 | - the possibility that the pointer could be NULL | |
528 | - the idea that successive values returned from malloc are non-equal | |
529 | - to be able to zero-fill for calloc. */ | |
530 | ||
531 | bool | |
532 | region::non_null_p () const | |
533 | { | |
534 | switch (get_kind ()) | |
535 | { | |
536 | default: | |
537 | return true; | |
538 | case RK_SYMBOLIC: | |
539 | /* Are we within a symbolic_region? If so, it could be NULL, and we | |
540 | have to fall back on the constraints. */ | |
541 | return false; | |
542 | case RK_HEAP_ALLOCATED: | |
543 | return false; | |
544 | } | |
545 | } | |
546 | ||
/* Comparator for trees to impose a deterministic ordering on
   T1 and T2.

   Both arguments must be non-NULL.  Orders first by tree code, then
   by code-specific criteria; only the codes handled below are
   supported (others hit gcc_unreachable).  */

static int
tree_cmp (const_tree t1, const_tree t2)
{
  gcc_assert (t1);
  gcc_assert (t2);

  /* Test tree codes first.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return TREE_CODE (t1) - TREE_CODE (t2);

  /* From this point on, we know T1 and T2 have the same tree code.  */

  if (DECL_P (t1))
    {
      /* Named decls order by name; named sorts before unnamed;
	 two unnamed decls fall back on DECL_UID for determinism.  */
      if (DECL_NAME (t1) && DECL_NAME (t2))
	return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
		       IDENTIFIER_POINTER (DECL_NAME (t2)));
      else
	{
	  if (DECL_NAME (t1))
	    return -1;
	  else if (DECL_NAME (t2))
	    return 1;
	  else
	    return DECL_UID (t1) - DECL_UID (t2);
	}
    }

  switch (TREE_CODE (t1))
    {
    case SSA_NAME:
      {
	/* Order by underlying var (when both have one), then by
	   SSA version; SSA names with a var sort before those
	   without.  */
	if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
	  {
	    int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
	    if (var_cmp)
	      return var_cmp;
	    return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
	else
	  {
	    if (SSA_NAME_VAR (t1))
	      return -1;
	    else if (SSA_NAME_VAR (t2))
	      return 1;
	    else
	      return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
      }
      break;

    case INTEGER_CST:
      return tree_int_cst_compare (t1, t2);

    case REAL_CST:
      {
	const real_value *rv1 = TREE_REAL_CST_PTR (t1);
	const real_value *rv2 = TREE_REAL_CST_PTR (t2);
	if (real_compare (UNORDERED_EXPR, rv1, rv2))
	  {
	    /* Impose an arbitrary order on NaNs relative to other NaNs
	       and to non-NaNs.  */
	    if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
	      return cmp_isnan;
	    if (int cmp_issignaling_nan
		  = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
	      return cmp_issignaling_nan;
	    return real_isneg (rv1) - real_isneg (rv2);
	  }
	if (real_compare (LT_EXPR, rv1, rv2))
	  return -1;
	if (real_compare (GT_EXPR, rv1, rv2))
	  return 1;
	return 0;
      }

    case STRING_CST:
      return strcmp (TREE_STRING_POINTER (t1),
		     TREE_STRING_POINTER (t2));

    default:
      gcc_unreachable ();
      break;
    }

  gcc_unreachable ();

  return 0;
}
639 | ||
640 | /* qsort comparator for trees to impose a deterministic ordering on | |
641 | P1 and P2. */ | |
642 | ||
643 | int | |
644 | tree_cmp (const void *p1, const void *p2) | |
645 | { | |
646 | const_tree t1 = *(const_tree const *)p1; | |
647 | const_tree t2 = *(const_tree const *)p2; | |
648 | ||
649 | return tree_cmp (t1, t2); | |
650 | } | |
651 | ||
652 | /* class frame_region : public space_region. */ | |
653 | ||
frame_region::~frame_region ()
{
  /* The frame owns the decl_regions it created for its locals
     (see get_region_for_local); free them all.  */
  for (map_t::iterator iter = m_locals.begin ();
       iter != m_locals.end ();
       ++iter)
    delete (*iter).second;
}
661 | ||
/* Implementation of region::accept vfunc for frame_region: also
   visit the calling frame, so a visitor sees the whole stack.  */

void
frame_region::accept (visitor *v) const
{
  region::accept (v);
  if (m_calling_frame)
    m_calling_frame->accept (v);
}
669 | ||
670 | /* Implementation of region::dump_to_pp vfunc for frame_region. */ | |
671 | ||
672 | void | |
673 | frame_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
674 | { | |
675 | if (simple) | |
676 | pp_printf (pp, "frame: %qs@%i", function_name (m_fun), get_stack_depth ()); | |
677 | else | |
678 | pp_printf (pp, "frame_region(%qs, index: %i, depth: %i)", | |
679 | function_name (m_fun), m_index, get_stack_depth ()); | |
680 | } | |
681 | ||
/* Get (creating and caching on first use) the decl_region within this
   frame for local EXPR, which must be a PARM_DECL, VAR_DECL, SSA_NAME
   or RESULT_DECL.  The returned region is owned by this frame (freed
   in the destructor).  */

const decl_region *
frame_region::get_region_for_local (region_model_manager *mgr,
				    tree expr) const
{
  // TODO: could also check that VAR_DECLs are locals
  gcc_assert (TREE_CODE (expr) == PARM_DECL
	      || TREE_CODE (expr) == VAR_DECL
	      || TREE_CODE (expr) == SSA_NAME
	      || TREE_CODE (expr) == RESULT_DECL);

  /* Ideally we'd use mutable here.  */
  /* This is a const member function that lazily populates a cache,
     hence the const_cast.  */
  map_t &mutable_locals = const_cast <map_t &> (m_locals);

  if (decl_region **slot = mutable_locals.get (expr))
    return *slot;
  decl_region *reg
    = new decl_region (mgr->alloc_region_id (), this, expr);
  mutable_locals.put (expr, reg);
  return reg;
}
702 | ||
703 | /* class globals_region : public space_region. */ | |
704 | ||
705 | /* Implementation of region::dump_to_pp vfunc for globals_region. */ | |
706 | ||
707 | void | |
708 | globals_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
709 | { | |
710 | if (simple) | |
711 | pp_string (pp, "::"); | |
712 | else | |
713 | pp_string (pp, "globals"); | |
714 | } | |
715 | ||
716 | /* class code_region : public map_region. */ | |
717 | ||
718 | /* Implementation of region::dump_to_pp vfunc for code_region. */ | |
719 | ||
720 | void | |
721 | code_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
722 | { | |
723 | if (simple) | |
724 | pp_string (pp, "code region"); | |
725 | else | |
726 | pp_string (pp, "code_region()"); | |
727 | } | |
728 | ||
729 | /* class function_region : public region. */ | |
730 | ||
731 | /* Implementation of region::dump_to_pp vfunc for function_region. */ | |
732 | ||
733 | void | |
734 | function_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
735 | { | |
736 | if (simple) | |
737 | { | |
738 | dump_quoted_tree (pp, m_fndecl); | |
739 | } | |
740 | else | |
741 | { | |
742 | pp_string (pp, "function_region("); | |
743 | dump_quoted_tree (pp, m_fndecl); | |
744 | pp_string (pp, ")"); | |
745 | } | |
746 | } | |
747 | ||
748 | /* class label_region : public region. */ | |
749 | ||
750 | /* Implementation of region::dump_to_pp vfunc for label_region. */ | |
751 | ||
752 | void | |
753 | label_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
754 | { | |
755 | if (simple) | |
756 | { | |
757 | dump_quoted_tree (pp, m_label); | |
758 | } | |
759 | else | |
760 | { | |
761 | pp_string (pp, "label_region("); | |
762 | dump_quoted_tree (pp, m_label); | |
763 | pp_string (pp, ")"); | |
764 | } | |
765 | } | |
766 | ||
767 | /* class stack_region : public region. */ | |
768 | ||
769 | /* Implementation of region::dump_to_pp vfunc for stack_region. */ | |
770 | ||
771 | void | |
772 | stack_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
773 | { | |
774 | if (simple) | |
775 | pp_string (pp, "stack region"); | |
776 | else | |
777 | pp_string (pp, "stack_region()"); | |
778 | } | |
779 | ||
780 | /* class heap_region : public region. */ | |
781 | ||
782 | /* Implementation of region::dump_to_pp vfunc for heap_region. */ | |
783 | ||
784 | void | |
785 | heap_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
786 | { | |
787 | if (simple) | |
788 | pp_string (pp, "heap region"); | |
789 | else | |
790 | pp_string (pp, "heap_region()"); | |
791 | } | |
792 | ||
793 | /* class root_region : public region. */ | |
794 | ||
/* root_region's ctor.
   The root has no parent and no type; its complexity is the
   minimal (1, 1).  */

root_region::root_region (unsigned id)
: region (complexity (1, 1), id, NULL, NULL_TREE)
{
}
801 | ||
802 | /* Implementation of region::dump_to_pp vfunc for root_region. */ | |
803 | ||
804 | void | |
805 | root_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
806 | { | |
807 | if (simple) | |
808 | pp_string (pp, "root region"); | |
809 | else | |
810 | pp_string (pp, "root_region()"); | |
811 | } | |
812 | ||
813 | /* class symbolic_region : public map_region. */ | |
814 | ||
/* Implementation of region::accept vfunc for symbolic_region:
   also visit the pointer svalue this region dereferences.  */

void
symbolic_region::accept (visitor *v) const
{
  region::accept (v);
  m_sval_ptr->accept (v);
}
823 | ||
824 | /* Implementation of region::dump_to_pp vfunc for symbolic_region. */ | |
825 | ||
826 | void | |
827 | symbolic_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
828 | { | |
829 | if (simple) | |
830 | { | |
831 | pp_string (pp, "(*"); | |
832 | m_sval_ptr->dump_to_pp (pp, simple); | |
833 | pp_string (pp, ")"); | |
834 | } | |
835 | else | |
836 | { | |
837 | pp_string (pp, "symbolic_region("); | |
838 | get_parent_region ()->dump_to_pp (pp, simple); | |
839 | pp_string (pp, ", "); | |
840 | print_quoted_type (pp, get_type ()); | |
841 | pp_string (pp, ", "); | |
842 | m_sval_ptr->dump_to_pp (pp, simple); | |
843 | pp_string (pp, ")"); | |
844 | } | |
845 | } | |
846 | ||
847 | /* class decl_region : public region. */ | |
848 | ||
849 | /* Implementation of region::dump_to_pp vfunc for decl_region. */ | |
850 | ||
851 | void | |
852 | decl_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
853 | { | |
854 | if (simple) | |
855 | pp_printf (pp, "%E", m_decl); | |
856 | else | |
857 | { | |
858 | pp_string (pp, "decl_region("); | |
859 | get_parent_region ()->dump_to_pp (pp, simple); | |
860 | pp_string (pp, ", "); | |
861 | print_quoted_type (pp, get_type ()); | |
862 | pp_printf (pp, ", %qE)", m_decl); | |
863 | } | |
864 | } | |
865 | ||
866 | /* Get the stack depth for the frame containing this decl, or 0 | |
867 | for a global. */ | |
868 | ||
869 | int | |
870 | decl_region::get_stack_depth () const | |
871 | { | |
872 | if (get_parent_region () == NULL) | |
873 | return 0; | |
874 | if (const frame_region *frame_reg | |
875 | = get_parent_region ()->dyn_cast_frame_region ()) | |
876 | return frame_reg->get_stack_depth (); | |
877 | return 0; | |
878 | } | |
879 | ||
2867118d DM |
/* If the underlying decl is in the global constant pool,
   return an svalue representing the constant value.
   Otherwise return NULL.

   Only handles the case of a VAR_DECL in the constant pool whose
   DECL_INITIAL is a CONSTRUCTOR.  */

const svalue *
decl_region::maybe_get_constant_value (region_model_manager *mgr) const
{
  if (TREE_CODE (m_decl) == VAR_DECL
      && DECL_IN_CONSTANT_POOL (m_decl)
      && DECL_INITIAL (m_decl)
      && TREE_CODE (DECL_INITIAL (m_decl)) == CONSTRUCTOR)
    return get_svalue_for_constructor (DECL_INITIAL (m_decl), mgr);
  return NULL;
}
2867118d | 894 | |
/* Get an svalue for CTOR, a CONSTRUCTOR for this region's decl.
   CTOR must not be a clobber.  */

const svalue *
decl_region::get_svalue_for_constructor (tree ctor,
					 region_model_manager *mgr) const
{
  gcc_assert (!TREE_CLOBBER_P (ctor));

  /* Create a binding map, applying ctor to it, using this
     decl_region as the base region when building child regions
     for offset calculations.  */
  binding_map map;
  map.apply_ctor_to_region (this, ctor, mgr);

  /* Return a compound svalue for the map we built.  */
  return mgr->get_or_create_compound_svalue (get_type (), map);
}
912 | ||
/* For use on decl_regions for global variables.

   Get an svalue for the initial value of this region at entry to
   "main" (either based on DECL_INITIAL, or implicit initialization to
   zero).  */

const svalue *
decl_region::get_svalue_for_initializer (region_model_manager *mgr) const
{
  tree init = DECL_INITIAL (m_decl);
  if (!init)
    {
      /* Implicit initialization to zero; use a compound_svalue for it.  */
      binding_cluster c (this);
      c.zero_fill_region (mgr->get_store_manager (), this);
      return mgr->get_or_create_compound_svalue (TREE_TYPE (m_decl),
						 c.get_map ());
    }

  /* Aggregate initializers are handled by building a binding map.  */
  if (TREE_CODE (init) == CONSTRUCTOR)
    return get_svalue_for_constructor (init, mgr);

  /* Reuse the get_rvalue logic from region_model.  */
  /* A throwaway model suffices since INIT is a constant expression;
     depth 0 as we're at the outermost frame.  */
  region_model m (mgr);
  return m.get_rvalue (path_var (init, 0), NULL);
}
939 | ||
808f4dfe DM |
940 | /* class field_region : public region. */ |
941 | ||
942 | /* Implementation of region::dump_to_pp vfunc for field_region. */ | |
943 | ||
944 | void | |
945 | field_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
946 | { | |
947 | if (simple) | |
948 | { | |
949 | get_parent_region ()->dump_to_pp (pp, simple); | |
950 | pp_string (pp, "."); | |
951 | pp_printf (pp, "%E", m_field); | |
952 | } | |
953 | else | |
954 | { | |
955 | pp_string (pp, "field_region("); | |
956 | get_parent_region ()->dump_to_pp (pp, simple); | |
957 | pp_string (pp, ", "); | |
958 | print_quoted_type (pp, get_type ()); | |
959 | pp_printf (pp, ", %qE)", m_field); | |
960 | } | |
961 | } | |
962 | ||
963 | /* class element_region : public region. */ | |
964 | ||
/* Implementation of region::accept vfunc for element_region:
   also visit the index svalue.  */

void
element_region::accept (visitor *v) const
{
  region::accept (v);
  m_index->accept (v);
}
973 | ||
974 | /* Implementation of region::dump_to_pp vfunc for element_region. */ | |
975 | ||
976 | void | |
977 | element_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
978 | { | |
979 | if (simple) | |
980 | { | |
981 | //pp_string (pp, "("); | |
982 | get_parent_region ()->dump_to_pp (pp, simple); | |
983 | pp_string (pp, "["); | |
984 | m_index->dump_to_pp (pp, simple); | |
985 | pp_string (pp, "]"); | |
986 | //pp_string (pp, ")"); | |
987 | } | |
988 | else | |
989 | { | |
990 | pp_string (pp, "element_region("); | |
991 | get_parent_region ()->dump_to_pp (pp, simple); | |
992 | pp_string (pp, ", "); | |
993 | print_quoted_type (pp, get_type ()); | |
994 | pp_string (pp, ", "); | |
995 | m_index->dump_to_pp (pp, simple); | |
996 | pp_printf (pp, ")"); | |
997 | } | |
998 | } | |
999 | ||
1000 | /* class offset_region : public region. */ | |
1001 | ||
1002 | /* Implementation of region::accept vfunc for offset_region. */ | |
1003 | ||
void
offset_region::accept (visitor *v) const
{
  /* Visit via the base class first, then also visit the byte-offset
     svalue, since this region depends on it.  */
  region::accept (v);
  m_byte_offset->accept (v);
}
1010 | ||
1011 | /* Implementation of region::dump_to_pp vfunc for offset_region. */ | |
1012 | ||
1013 | void | |
1014 | offset_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
1015 | { | |
1016 | if (simple) | |
1017 | { | |
1018 | //pp_string (pp, "("); | |
1019 | get_parent_region ()->dump_to_pp (pp, simple); | |
1020 | pp_string (pp, "+"); | |
1021 | m_byte_offset->dump_to_pp (pp, simple); | |
1022 | //pp_string (pp, ")"); | |
1023 | } | |
1024 | else | |
1025 | { | |
1026 | pp_string (pp, "offset_region("); | |
1027 | get_parent_region ()->dump_to_pp (pp, simple); | |
1028 | pp_string (pp, ", "); | |
1029 | print_quoted_type (pp, get_type ()); | |
1030 | pp_string (pp, ", "); | |
1031 | m_byte_offset->dump_to_pp (pp, simple); | |
1032 | pp_printf (pp, ")"); | |
1033 | } | |
1034 | } | |
1035 | ||
1036 | /* class cast_region : public region. */ | |
1037 | ||
1038 | /* Implementation of region::accept vfunc for cast_region. */ | |
1039 | ||
void
cast_region::accept (visitor *v) const
{
  /* Visit via the base class first, then also visit the region being
     cast, since this region wraps it.  */
  region::accept (v);
  m_original_region->accept (v);
}
1046 | ||
1047 | /* Implementation of region::dump_to_pp vfunc for cast_region. */ | |
1048 | ||
1049 | void | |
1050 | cast_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
1051 | { | |
1052 | if (simple) | |
1053 | { | |
1054 | pp_string (pp, "CAST_REG("); | |
1055 | print_quoted_type (pp, get_type ()); | |
1056 | pp_string (pp, ", "); | |
1057 | m_original_region->dump_to_pp (pp, simple); | |
1058 | pp_string (pp, ")"); | |
1059 | } | |
1060 | else | |
1061 | { | |
1062 | pp_string (pp, "cast_region("); | |
1063 | m_original_region->dump_to_pp (pp, simple); | |
1064 | pp_string (pp, ", "); | |
1065 | print_quoted_type (pp, get_type ()); | |
1066 | pp_printf (pp, ")"); | |
1067 | } | |
1068 | } | |
1069 | ||
1070 | /* class heap_allocated_region : public region. */ | |
1071 | ||
1072 | /* Implementation of region::dump_to_pp vfunc for heap_allocated_region. */ | |
1073 | ||
1074 | void | |
1075 | heap_allocated_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
1076 | { | |
1077 | if (simple) | |
1078 | pp_printf (pp, "HEAP_ALLOCATED_REGION(%i)", get_id ()); | |
1079 | else | |
1080 | pp_printf (pp, "heap_allocated_region(%i)", get_id ()); | |
1081 | } | |
1082 | ||
1083 | /* class alloca_region : public region. */ | |
1084 | ||
1085 | /* Implementation of region::dump_to_pp vfunc for alloca_region. */ | |
1086 | ||
1087 | void | |
1088 | alloca_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
1089 | { | |
1090 | if (simple) | |
1091 | pp_string (pp, "ALLOCA_REGION"); | |
1092 | else | |
1093 | pp_string (pp, "alloca_region()"); | |
1094 | } | |
1095 | ||
1096 | /* class string_region : public region. */ | |
1097 | ||
1098 | /* Implementation of region::dump_to_pp vfunc for string_region. */ | |
1099 | ||
1100 | void | |
1101 | string_region::dump_to_pp (pretty_printer *pp, bool simple) const | |
1102 | { | |
1103 | if (simple) | |
1104 | dump_tree (pp, m_string_cst); | |
1105 | else | |
1106 | { | |
1107 | pp_string (pp, "string_region("); | |
1108 | dump_tree (pp, m_string_cst); | |
1109 | pp_string (pp, " ("); | |
1110 | pp_pointer (pp, m_string_cst); | |
1111 | pp_string (pp, "))"); | |
1112 | } | |
1113 | } | |
1114 | ||
1115 | /* class unknown_region : public region. */ | |
1116 | ||
1117 | /* Implementation of region::dump_to_pp vfunc for unknown_region. */ | |
1118 | ||
void
unknown_region::dump_to_pp (pretty_printer *pp, bool /*simple*/) const
{
  /* The same fixed text is emitted regardless of SIMPLE.  */
  pp_string (pp, "UNKNOWN_REGION");
}
1124 | ||
1125 | } // namespace ana | |
1126 | ||
1127 | #endif /* #if ENABLE_ANALYZER */ |