]>
Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* Liveness for SSA trees. |
3aea1f79 | 2 | Copyright (C) 2003-2014 Free Software Foundation, Inc. |
4ee9c684 | 3 | Contributed by Andrew MacLeod <amacleod@redhat.com> |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 9 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
d9dd21a8 | 24 | #include "hash-table.h" |
4ee9c684 | 25 | #include "tm.h" |
26 | #include "tree.h" | |
ce084dfc | 27 | #include "gimple-pretty-print.h" |
4ee9c684 | 28 | #include "bitmap.h" |
424a4a92 | 29 | #include "sbitmap.h" |
94ea8568 | 30 | #include "predict.h" |
31 | #include "vec.h" | |
32 | #include "hashtab.h" | |
33 | #include "hash-set.h" | |
34 | #include "machmode.h" | |
35 | #include "hard-reg-set.h" | |
36 | #include "input.h" | |
37 | #include "function.h" | |
38 | #include "dominance.h" | |
39 | #include "cfg.h" | |
bc61cadb | 40 | #include "basic-block.h" |
41 | #include "tree-ssa-alias.h" | |
42 | #include "internal-fn.h" | |
43 | #include "gimple-expr.h" | |
44 | #include "is-a.h" | |
073c1fd5 | 45 | #include "gimple.h" |
dcf1a1ec | 46 | #include "gimple-iterator.h" |
073c1fd5 | 47 | #include "gimple-ssa.h" |
48 | #include "tree-phinodes.h" | |
49 | #include "ssa-iterators.h" | |
9ed99284 | 50 | #include "stringpool.h" |
073c1fd5 | 51 | #include "tree-ssanames.h" |
9ed99284 | 52 | #include "expr.h" |
073c1fd5 | 53 | #include "tree-dfa.h" |
b9ed1410 | 54 | #include "timevar.h" |
55 | #include "dumpfile.h" | |
4ee9c684 | 56 | #include "tree-ssa-live.h" |
0b205f4c | 57 | #include "diagnostic-core.h" |
2ff2a12b | 58 | #include "debug.h" |
59 | #include "flags.h" | |
4d8d655b | 60 | #include "tree-ssa.h" |
2d043327 | 61 | |
30928c44 | 62 | #ifdef ENABLE_CHECKING |
63 | static void verify_live_on_entry (tree_live_info_p); | |
64 | #endif | |
4ee9c684 | 65 | |
4ee9c684 | 66 | |
2d043327 | 67 | /* VARMAP maintains a mapping from SSA version number to real variables. |
68 | ||
69 | All SSA_NAMES are divided into partitions. Initially each ssa_name is the | |
70 | only member of it's own partition. Coalescing will attempt to group any | |
71 | ssa_names which occur in a copy or in a PHI node into the same partition. | |
72 | ||
73 | At the end of out-of-ssa, each partition becomes a "real" variable and is | |
74 | rewritten as a compiler variable. | |
75 | ||
f0b5f617 | 76 | The var_map data structure is used to manage these partitions. It allows |
2d043327 | 77 | partitions to be combined, and determines which partition belongs to what |
78 | ssa_name or variable, and vice versa. */ | |
79 | ||
80 | ||
/* Hashtable helpers.  tree_int_map_hasher hashes and compares
   tree_int_map entries via the tree stored in their base field.
   Entries are not owned by the table (typed_noop_remove), so removing
   one frees nothing.  */

struct tree_int_map_hasher : typed_noop_remove <tree_int_map>
{
  typedef tree_int_map value_type;
  typedef tree_int_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash entry V by its base tree.  */

inline hashval_t
tree_int_map_hasher::hash (const value_type *v)
{
  return tree_map_base_hash (v);
}

/* Return true if entries V and C have the same base tree.  */

inline bool
tree_int_map_hasher::equal (const value_type *v, const compare_type *c)
{
  return tree_int_map_eq (v, c);
}
102 | ||
103 | ||
/* This routine will initialize the basevar fields of MAP.  Each
   partition is keyed either on the underlying user variable of its
   representative SSA name or, for anonymous SSA names, on the
   (canonical) type; partitions sharing a key share a base index.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part;
  tree var;
  struct tree_int_map *m, *mapstorage;

  num_part = num_var_partitions (map);
  hash_table<tree_int_map_hasher> tree_to_index (num_part);
  /* We can have at most num_part entries in the hash tables, so it's
     enough to allocate so many map elements once, saving some malloc
     calls.  */
  mapstorage = m = XNEWVEC (struct tree_int_map, num_part);

  /* If a base table already exists, clear it, otherwise create it.  */
  free (map->partition_to_base_index);
  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      struct tree_int_map **slot;
      unsigned baseindex;
      var = partition_to_var (map, x);
      if (SSA_NAME_VAR (var)
	  && (!VAR_P (SSA_NAME_VAR (var))
	      || !DECL_IGNORED_P (SSA_NAME_VAR (var))))
	m->base.from = SSA_NAME_VAR (var);
      else
	/* This restricts what anonymous SSA names we can coalesce
	   as it restricts the sets we compute conflicts for.
	   Using TREE_TYPE to generate sets is the easiest, as
	   type equivalency also holds for SSA names with the same
	   underlying decl.

	   Check gimple_can_coalesce_p when changing this code.  */
	m->base.from = (TYPE_CANONICAL (TREE_TYPE (var))
			? TYPE_CANONICAL (TREE_TYPE (var))
			: TREE_TYPE (var));
      /* If base variable hasn't been seen, set it up.  */
      slot = tree_to_index.find_slot (m, INSERT);
      if (!*slot)
	{
	  /* First occurrence of this key: the current storage element
	     becomes the table entry and its index the base index.  */
	  baseindex = m - mapstorage;
	  m->to = baseindex;
	  *slot = m;
	  m++;
	}
      else
	baseindex = (*slot)->to;
      map->partition_to_base_index[x] = baseindex;
    }

  /* M points one past the last used element, so the distance from
     MAPSTORAGE is the number of distinct base variables.  */
  map->num_basevars = m - mapstorage;

  free (mapstorage);
}
163 | ||
4ee9c684 | 164 | |
2d043327 | 165 | /* Remove the base table in MAP. */ |
4ee9c684 | 166 | |
2d043327 | 167 | static void |
168 | var_map_base_fini (var_map map) | |
169 | { | |
170 | /* Free the basevar info if it is present. */ | |
171 | if (map->partition_to_base_index != NULL) | |
172 | { | |
2d043327 | 173 | free (map->partition_to_base_index); |
174 | map->partition_to_base_index = NULL; | |
175 | map->num_basevars = 0; | |
176 | } | |
177 | } | |
4ee9c684 | 178 | /* Create a variable partition map of SIZE, initialize and return it. */ |
179 | ||
180 | var_map | |
181 | init_var_map (int size) | |
182 | { | |
183 | var_map map; | |
184 | ||
185 | map = (var_map) xmalloc (sizeof (struct _var_map)); | |
186 | map->var_partition = partition_new (size); | |
4ee9c684 | 187 | |
2d043327 | 188 | map->partition_to_view = NULL; |
189 | map->view_to_partition = NULL; | |
4ee9c684 | 190 | map->num_partitions = size; |
191 | map->partition_size = size; | |
2d043327 | 192 | map->num_basevars = 0; |
193 | map->partition_to_base_index = NULL; | |
4ee9c684 | 194 | return map; |
195 | } | |
196 | ||
197 | ||
/* Free memory associated with MAP: the base-variable table, the
   underlying partition structure, any view arrays, and MAP itself.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  /* free is NULL-safe; the view arrays may never have been created.  */
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}
209 | ||
210 | ||
48e1416a | 211 | /* This function will combine the partitions in MAP for VAR1 and VAR2. It |
212 | Returns the partition which represents the new partition. If the two | |
0bed3869 | 213 | partitions cannot be combined, NO_PARTITION is returned. */ |
4ee9c684 | 214 | |
215 | int | |
216 | var_union (var_map map, tree var1, tree var2) | |
217 | { | |
218 | int p1, p2, p3; | |
a8dd994c | 219 | |
220 | gcc_assert (TREE_CODE (var1) == SSA_NAME); | |
221 | gcc_assert (TREE_CODE (var2) == SSA_NAME); | |
4ee9c684 | 222 | |
48e1416a | 223 | /* This is independent of partition_to_view. If partition_to_view is |
4ee9c684 | 224 | on, then whichever one of these partitions is absorbed will never have a |
2d043327 | 225 | dereference into the partition_to_view array any more. */ |
4ee9c684 | 226 | |
a8dd994c | 227 | p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1)); |
228 | p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2)); | |
4ee9c684 | 229 | |
8c0963c4 | 230 | gcc_assert (p1 != NO_PARTITION); |
231 | gcc_assert (p2 != NO_PARTITION); | |
4ee9c684 | 232 | |
233 | if (p1 == p2) | |
234 | p3 = p1; | |
235 | else | |
236 | p3 = partition_union (map->var_partition, p1, p2); | |
237 | ||
2d043327 | 238 | if (map->partition_to_view) |
239 | p3 = map->partition_to_view[p3]; | |
4ee9c684 | 240 | |
4ee9c684 | 241 | return p3; |
242 | } | |
243 | ||
48e1416a | 244 | |
245 | /* Compress the partition numbers in MAP such that they fall in the range | |
4ee9c684 | 246 | 0..(num_partitions-1) instead of wherever they turned out during |
247 | the partitioning exercise. This removes any references to unused | |
248 | partitions, thereby allowing bitmaps and other vectors to be much | |
48e1416a | 249 | denser. |
4ee9c684 | 250 | |
251 | This is implemented such that compaction doesn't affect partitioning. | |
252 | Ie., once partitions are created and possibly merged, running one | |
253 | or more different kind of compaction will not affect the partitions | |
254 | themselves. Their index might change, but all the same variables will | |
255 | still be members of the same partition group. This allows work on reduced | |
256 | sets, and no loss of information when a larger set is later desired. | |
257 | ||
258 | In particular, coalescing can work on partitions which have 2 or more | |
259 | definitions, and then 'recompact' later to include all the single | |
260 | definitions for assignment to program variables. */ | |
261 | ||
2d043327 | 262 | |
/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  Ownership of the bitmap passes to the caller; it is normally
   handed to partition_view_fini, which frees it.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view? Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  Virtual
     operands, and default definitions with zero uses, are skipped.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}
301 | ||
302 | ||
/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      /* 0xff in every byte yields -1 in every entry, marking partitions
	 that are not part of the view.  */
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}
340 | ||
341 | ||
48e1416a | 342 | /* Create a partition view which includes all the used partitions in MAP. If |
2d043327 | 343 | WANT_BASES is true, create the base variable map as well. */ |
344 | ||
4fb07d00 | 345 | void |
2d043327 | 346 | partition_view_normal (var_map map, bool want_bases) |
347 | { | |
348 | bitmap used; | |
349 | ||
350 | used = partition_view_init (map); | |
351 | partition_view_fini (map, used); | |
352 | ||
353 | if (want_bases) | |
354 | var_map_base_init (map); | |
4ee9c684 | 355 | else |
2d043327 | 356 | var_map_base_fini (map); |
357 | } | |
358 | ||
359 | ||
48e1416a | 360 | /* Create a partition view in MAP which includes just partitions which occur in |
361 | the bitmap ONLY. If WANT_BASES is true, create the base variable map | |
2d043327 | 362 | as well. */ |
363 | ||
4fb07d00 | 364 | void |
2d043327 | 365 | partition_view_bitmap (var_map map, bitmap only, bool want_bases) |
366 | { | |
367 | bitmap used; | |
368 | bitmap new_partitions = BITMAP_ALLOC (NULL); | |
369 | unsigned x, p; | |
370 | bitmap_iterator bi; | |
371 | ||
372 | used = partition_view_init (map); | |
373 | EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi) | |
4ee9c684 | 374 | { |
2d043327 | 375 | p = partition_find (map->var_partition, x); |
376 | gcc_assert (bitmap_bit_p (used, p)); | |
377 | bitmap_set_bit (new_partitions, p); | |
4ee9c684 | 378 | } |
2d043327 | 379 | partition_view_fini (map, new_partitions); |
4ee9c684 | 380 | |
2d043327 | 381 | if (want_bases) |
382 | var_map_base_init (map); | |
383 | else | |
384 | var_map_base_fini (map); | |
4ee9c684 | 385 | } |
386 | ||
387 | ||
4ae5778c | 388 | static bitmap usedvars; |
389 | ||
/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  /* bitmap_set_bit returns true only when the bit was not already set;
     mark_all_vars_used_1 relies on that to walk a global var's
     initializer just once, on first use.  */
  return bitmap_set_bit (usedvars, DECL_UID (var));
}
398 | ||
/* Return true if VAR has been marked as used via set_is_used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}
406 | ||
920bd157 | 407 | static inline void mark_all_vars_used (tree *); |
4ee9c684 | 408 | |
/* Helper function for mark_all_vars_used, called via walk_tree.
   Marks the variable at *TP as used, sets TREE_USED on the blocks and
   labels it reaches, and prunes subtrees that cannot contain vars.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  /* C is the code class of the node as seen in the IL, computed before
     T is possibly replaced by an SSA name's underlying variable.  */
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      /* Continue with the underlying variable; anonymous SSA names
	 have none and need no marking.  */
      t = SSA_NAME_VAR (t);
      if (!t)
	return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
	 initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  /* Types and decls contain no further vars worth walking.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
465 | ||
2ff2a12b | 466 | /* Mark the scope block SCOPE and its subblocks unused when they can be |
467 | possibly eliminated if dead. */ | |
468 | ||
469 | static void | |
470 | mark_scope_block_unused (tree scope) | |
471 | { | |
472 | tree t; | |
473 | TREE_USED (scope) = false; | |
474 | if (!(*debug_hooks->ignore_block) (scope)) | |
475 | TREE_USED (scope) = true; | |
476 | for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t)) | |
477 | mark_scope_block_unused (t); | |
478 | } | |
479 | ||
/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   Block is declared dead if:
     1) No statements are associated with it.
     2) Declares no live variables
     3) All subblocks are dead
	or there is precisely one subblock and the block
	has same abstract origin as outer block and declares
	no variables, so it is pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   out of scope blocks to let them be recycled by GGC and to save copying work
   done by the inliner.  */

static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* First pass: prune the variable chain of SCOPE, noting whether any
     surviving declaration forces the block to be kept.  */
  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of the function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may end up with
	 different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  /* Unlink *T from the chain; re-examine the new occupant of
	     this slot on the next iteration.  */
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so user can't get into the scopes at first place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs. non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below, however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing saving quite
	 a lot of memory.

	 For sake of -g3, we keep around those vars but we don't count this as
	 use of block, so innermost block with no used vars and no instructions
	 can be considered dead.  We only want to keep around blocks user can
	 breakpoint into and ask about value of optimized out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  /* Second pass: recurse into subblocks; dead subblocks are removed,
     and the children of a dead-but-nonempty subblock are spliced into
     SCOPE's subblock chain in its place.  */
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks ++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks ++;
      }


  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* When not generating debug info we can eliminate info on unused
     variables.  */
  else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE)
    {
      /* Even for -g0 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined function.
     Inlined functions are always represented by block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
		== UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}
2d043327 | 656 | |
/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  Also sets
   TREE_USED on blocks and labels encountered; see mark_all_vars_used_1
   for the per-node behavior.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}
665 | ||
5169661d | 666 | /* Helper function for clear_unused_block_pointer, called via walk_tree. */ |
667 | ||
668 | static tree | |
669 | clear_unused_block_pointer_1 (tree *tp, int *, void *) | |
670 | { | |
671 | if (EXPR_P (*tp) && TREE_BLOCK (*tp) | |
672 | && !TREE_USED (TREE_BLOCK (*tp))) | |
673 | TREE_SET_BLOCK (*tp, NULL); | |
5169661d | 674 | return NULL_TREE; |
675 | } | |
676 | ||
/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	unsigned i;
	tree b;
	gimple stmt = gsi_stmt (gsi);

	/* Only debug and clobber statements are candidates; all other
	   statements keep their blocks.  */
	if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	  continue;
	b = gimple_block (stmt);
	if (b && !TREE_USED (b))
	  gimple_set_block (stmt, NULL);
	/* Also clear stale block pointers carried by expressions inside
	   the statement's operands.  */
	for (i = 0; i < gimple_num_ops (stmt); i++)
	  walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
		     NULL, NULL);
      }
}
b79917fd | 703 | |
/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  Recurses
   into subblocks with INDENT increased by 2.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
  	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  /* Local variables of this block.  */
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  /* Variables localized elsewhere but belonging to this scope.  */
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n",indent, "");
}
751 | ||
/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  DEBUG_FUNCTION: intended for manual use
   from a debugger.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}
760 | ||
b79917fd | 761 | |
/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  DECL_INITIAL of the function
   holds its outermost scope block.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}
db22d3cc | 770 | |
b79917fd | 771 | |
/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  DEBUG_FUNCTION: intended for
   manual use from a debugger.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}
780 | ||
/* Remove local variables that are not referenced in the IL.  Walks the
   CFG marking every referenced variable and lexical block, then drops
   unmarked VAR_DECLs from cfun->local_decls and prunes the scope-block
   tree.  Also handles out-of-scope clobber statements specially (see
   below) and recomputes cfun->has_local_explicit_reg_vars.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  /* usedvars is a file-scope bitmap; presumably mark_all_vars_used
     records variables into it and is_used_p queries it.  */
  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  /* Debug stmts must not keep otherwise-unused variables alive.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  /* Clobbers are deferred to the second pass below.  */
	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
	}

      /* PHI results and arguments are references too, as are the blocks
	 recorded in the PHI argument locations.  */
      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
	  tree def;
	  gphi *phi = gpi.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
	      tree arg = USE_FROM_PTR (arg_p);
	      int index = PHI_ARG_INDEX_FROM_USE (arg_p);
	      tree block =
		LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
	      if (block != NULL)
		TREE_USED (block) = true;
	      mark_all_vars_used (&arg);
            }
        }

      /* Goto locations on outgoing edges reference lexical blocks too.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (LOCATION_BLOCK (e->goto_locus) != NULL)
	  TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We do a two-pass approach about the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  */
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree base = get_base_address (lhs);
		/* Remove clobbers referencing unused vars, or clobbers
		   with MEM_REF lhs referencing uninitialized pointers.  */
		if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
		    || (TREE_CODE (lhs) == MEM_REF
			&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
			&& SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
			&& (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
			    != PARM_DECL)))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    continue;
		  }
		/* The clobber survives, so its block is used after all.  */
		if (b)
		  TREE_USED (b) = true;
	      }
	    gsi_next (&gsi);
	  }
      }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  Compacts
     the vector in place: srcidx scans, dstidx receives kept entries.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (TREE_CODE (var) == VAR_DECL)
	{
	  if (!is_used_p (var))
	    {
	      tree def;
	      if (cfun->nonlocal_goto_save_area
		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
		cfun->nonlocal_goto_save_area = NULL;
	      /* Release any default def associated with var.  */
	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
		{
		  set_ssa_default_def (cfun, var, NULL_TREE);
		  release_ssa_name (def);
		}
	      continue;
	    }
	}
      if (TREE_CODE (var) == VAR_DECL
	  && DECL_HARD_REGISTER (var)
	  && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	(*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}
957 | ||
/* Obstack for global liveness info bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large and
   we'll hold on to all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  */
static bitmap_obstack liveness_bitmap_obstack;
4ee9c684 | 964 | |
965 | /* Allocate and return a new live range information object base on MAP. */ | |
966 | ||
967 | static tree_live_info_p | |
968 | new_tree_live_info (var_map map) | |
969 | { | |
970 | tree_live_info_p live; | |
4fb07d00 | 971 | basic_block bb; |
4ee9c684 | 972 | |
4fb07d00 | 973 | live = XNEW (struct tree_live_info_d); |
4ee9c684 | 974 | live->map = map; |
fe672ac0 | 975 | live->num_blocks = last_basic_block_for_fn (cfun); |
4ee9c684 | 976 | |
fe672ac0 | 977 | live->livein = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun)); |
fc00614f | 978 | FOR_EACH_BB_FN (bb, cfun) |
4fb07d00 | 979 | bitmap_initialize (&live->livein[bb->index], &liveness_bitmap_obstack); |
4ee9c684 | 980 | |
fe672ac0 | 981 | live->liveout = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun)); |
fc00614f | 982 | FOR_EACH_BB_FN (bb, cfun) |
4fb07d00 | 983 | bitmap_initialize (&live->liveout[bb->index], &liveness_bitmap_obstack); |
30928c44 | 984 | |
fe672ac0 | 985 | live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun)); |
30928c44 | 986 | live->stack_top = live->work_stack; |
987 | ||
4fb07d00 | 988 | live->global = BITMAP_ALLOC (&liveness_bitmap_obstack); |
4ee9c684 | 989 | return live; |
990 | } | |
991 | ||
992 | ||
993 | /* Free storage for live range info object LIVE. */ | |
994 | ||
48e1416a | 995 | void |
4ee9c684 | 996 | delete_tree_live_info (tree_live_info_p live) |
997 | { | |
4fb07d00 | 998 | bitmap_obstack_release (&liveness_bitmap_obstack); |
30928c44 | 999 | free (live->work_stack); |
30928c44 | 1000 | free (live->liveout); |
30928c44 | 1001 | free (live->livein); |
30928c44 | 1002 | free (live); |
4ee9c684 | 1003 | } |
1004 | ||
1005 | ||
/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
   TMP is a temporary work bitmap which is passed in to avoid reallocating
   it each time.  Blocks needing re-processing have their visited bit
   cleared and their index pushed on LIVE's work stack.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
		 bitmap tmp)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  /* Callers must not revisit a block whose visited bit is still set.  */
  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      /* TMP is variables live-on-entry from BB that aren't defined in the
	 predecessor block.  This should be the live on entry vars to pred.
	 Note that liveout is the DEFs in a block while live on entry is
	 being calculated.  */
      bitmap_and_compl (tmp, loe, &live->liveout[pred_bb->index]);

      /* Add these bits to live-on-entry for the pred. if there are any
	 changes, and pred_bb has been visited already, add it to the
	 revisit stack.  */
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
      if (bitmap_bit_p (visited, pred_bb->index) && change)
	{
	  bitmap_clear_bit (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}
1048 | ||
1049 | ||
48e1416a | 1050 | /* Using LIVE, fill in all the live-on-entry blocks between the defs and uses |
7920eed5 | 1051 | of all the variables. */ |
4ee9c684 | 1052 | |
30928c44 | 1053 | static void |
1054 | live_worklist (tree_live_info_p live) | |
4ee9c684 | 1055 | { |
30928c44 | 1056 | unsigned b; |
4ee9c684 | 1057 | basic_block bb; |
fe672ac0 | 1058 | sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1); |
4fb07d00 | 1059 | bitmap tmp = BITMAP_ALLOC (&liveness_bitmap_obstack); |
4ee9c684 | 1060 | |
53c5d9d4 | 1061 | bitmap_clear (visited); |
4ee9c684 | 1062 | |
80777cd8 | 1063 | /* Visit all the blocks in reverse order and propagate live on entry values |
30928c44 | 1064 | into the predecessors blocks. */ |
7a46197b | 1065 | FOR_EACH_BB_REVERSE_FN (bb, cfun) |
30928c44 | 1066 | loe_visit_block (live, bb, visited, tmp); |
4ee9c684 | 1067 | |
30928c44 | 1068 | /* Process any blocks which require further iteration. */ |
1069 | while (live->stack_top != live->work_stack) | |
4ee9c684 | 1070 | { |
30928c44 | 1071 | b = *--(live->stack_top); |
f5a6b05f | 1072 | loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited, tmp); |
30928c44 | 1073 | } |
4ee9c684 | 1074 | |
30928c44 | 1075 | BITMAP_FREE (tmp); |
1076 | sbitmap_free (visited); | |
1077 | } | |
4ee9c684 | 1078 | |
4ee9c684 | 1079 | |
/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Def's are marked temporarily
   in the liveout vector (calculate_live_on_exit clears them again).  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    /* No defining statement: treat the name as defined at function entry.  */
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHI's are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether it's live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
	  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	/* Debug uses never extend live ranges.  */
	continue;
      else
	{
	  /* If it's not defined in this block, it's live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (&live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
1156 | ||
1157 | ||
/* Calculate the live on exit vectors based on the entry info in LIVEINFO.
   Live-on-exit of a block is the union of live-on-entry of its successors
   plus the PHI arguments flowing out along its outgoing edges.  */

void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* live on entry calculations used liveout vectors for defs, clear them.  */
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      e = gimple_phi_arg_edge (phi, i);
	      if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
	    }
	}

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	  bitmap_ior_into (&liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}
1205 | ||
2d043327 | 1206 | |
48e1416a | 1207 | /* Given partition map MAP, calculate all the live on entry bitmaps for |
30928c44 | 1208 | each partition. Return a new live info object. */ |
1209 | ||
48e1416a | 1210 | tree_live_info_p |
30928c44 | 1211 | calculate_live_ranges (var_map map) |
1212 | { | |
1213 | tree var; | |
1214 | unsigned i; | |
1215 | tree_live_info_p live; | |
4ee9c684 | 1216 | |
4fb07d00 | 1217 | bitmap_obstack_initialize (&liveness_bitmap_obstack); |
30928c44 | 1218 | live = new_tree_live_info (map); |
4ee9c684 | 1219 | for (i = 0; i < num_var_partitions (map); i++) |
1220 | { | |
30928c44 | 1221 | var = partition_to_var (map, i); |
1222 | if (var != NULL_TREE) | |
1223 | set_var_live_on_entry (var, live); | |
4ee9c684 | 1224 | } |
1225 | ||
30928c44 | 1226 | live_worklist (live); |
1227 | ||
1228 | #ifdef ENABLE_CHECKING | |
1229 | verify_live_on_entry (live); | |
1230 | #endif | |
1231 | ||
1232 | calculate_live_on_exit (live); | |
1233 | return live; | |
4ee9c684 | 1234 | } |
1235 | ||
1236 | ||
4ee9c684 | 1237 | /* Output partition map MAP to file F. */ |
1238 | ||
1239 | void | |
1240 | dump_var_map (FILE *f, var_map map) | |
1241 | { | |
1242 | int t; | |
1243 | unsigned x, y; | |
1244 | int p; | |
1245 | ||
1246 | fprintf (f, "\nPartition map \n\n"); | |
1247 | ||
1248 | for (x = 0; x < map->num_partitions; x++) | |
1249 | { | |
2d043327 | 1250 | if (map->view_to_partition != NULL) |
1251 | p = map->view_to_partition[x]; | |
4ee9c684 | 1252 | else |
1253 | p = x; | |
1254 | ||
ade7d11b | 1255 | if (ssa_name (p) == NULL_TREE |
1256 | || virtual_operand_p (ssa_name (p))) | |
4ee9c684 | 1257 | continue; |
1258 | ||
1259 | t = 0; | |
c211d998 | 1260 | for (y = 1; y < num_ssa_names; y++) |
4ee9c684 | 1261 | { |
1262 | p = partition_find (map->var_partition, y); | |
2d043327 | 1263 | if (map->partition_to_view) |
1264 | p = map->partition_to_view[p]; | |
4ee9c684 | 1265 | if (p == (int)x) |
1266 | { | |
1267 | if (t++ == 0) | |
1268 | { | |
9af5ce0c | 1269 | fprintf (f, "Partition %d (", x); |
4ee9c684 | 1270 | print_generic_expr (f, partition_to_var (map, p), TDF_SLIM); |
1271 | fprintf (f, " - "); | |
1272 | } | |
1273 | fprintf (f, "%d ", y); | |
1274 | } | |
1275 | } | |
1276 | if (t != 0) | |
1277 | fprintf (f, ")\n"); | |
1278 | } | |
1279 | fprintf (f, "\n"); | |
1280 | } | |
1281 | ||
1282 | ||
c7d89805 | 1283 | /* Generic dump for the above. */ |
1284 | ||
1285 | DEBUG_FUNCTION void | |
1286 | debug (_var_map &ref) | |
1287 | { | |
1288 | dump_var_map (stderr, &ref); | |
1289 | } | |
1290 | ||
1291 | DEBUG_FUNCTION void | |
1292 | debug (_var_map *ptr) | |
1293 | { | |
1294 | if (ptr) | |
1295 | debug (*ptr); | |
1296 | else | |
1297 | fprintf (stderr, "<nil>\n"); | |
1298 | } | |
1299 | ||
1300 | ||
/* Output live range info LIVE to file F, controlled by FLAG.  FLAG is
   tested against the LIVEDUMP_ENTRY and LIVEDUMP_EXIT bits to select
   which of the live-on-entry / live-on-exit sets are printed.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
	  /* Each set bit is a partition index; print its representative.  */
	  EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, " ");
	    }
	  fprintf (f, "\n");
	}
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, " ");
	    }
	  fprintf (f, "\n");
	}
    }
}
8c0963c4 | 1339 | |
c7d89805 | 1340 | |
1341 | /* Generic dump for the above. */ | |
1342 | ||
1343 | DEBUG_FUNCTION void | |
1344 | debug (tree_live_info_d &ref) | |
1345 | { | |
1346 | dump_live_info (stderr, &ref, 0); | |
1347 | } | |
1348 | ||
1349 | DEBUG_FUNCTION void | |
1350 | debug (tree_live_info_d *ptr) | |
1351 | { | |
1352 | if (ptr) | |
1353 | debug (*ptr); | |
1354 | else | |
1355 | fprintf (stderr, "<nil>\n"); | |
1356 | } | |
1357 | ||
1358 | ||
8c0963c4 | 1359 | #ifdef ENABLE_CHECKING |
2d043327 | 1360 | /* Verify that SSA_VAR is a non-virtual SSA_NAME. */ |
1361 | ||
8c0963c4 | 1362 | void |
1363 | register_ssa_partition_check (tree ssa_var) | |
1364 | { | |
1365 | gcc_assert (TREE_CODE (ssa_var) == SSA_NAME); | |
7c782c9b | 1366 | if (virtual_operand_p (ssa_var)) |
8c0963c4 | 1367 | { |
1368 | fprintf (stderr, "Illegally registering a virtual SSA name :"); | |
1369 | print_generic_expr (stderr, ssa_var, TDF_SLIM); | |
1370 | fprintf (stderr, " in the SSA->Normal phase.\n"); | |
1371 | internal_error ("SSA corruption"); | |
1372 | } | |
1373 | } | |
30928c44 | 1374 | |
1375 | ||
/* Verify that the info in LIVE matches the current cfg.  Checks every
   partition against each successor of the entry block: anything live on
   entry there must be an (undefined) default def, and every default def
   must be live on entry (or consumed by a PHI of that block).  Aborts
   via gcc_assert if any violation was reported.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program. This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  /* D is the default def of the underlying variable, if any.  */
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      /* Live on entry to the function yet defined by a real
		 statement: the name appears to have multiple defs.  */
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
		{
		  /* Defined by a nop: must be the variable's default def.  */
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
			{
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* An undefined local variable does not need to be very
		   alive.  */
		if (ssa_undefined_value_p (var, false))
		  continue;

		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gphi_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gphi *phi = gsi.phi ();
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  /* NUM counts reported violations; any report is fatal.  */
  gcc_assert (num <= 0);
}
8c0963c4 | 1483 | #endif |