]>
Commit | Line | Data |
---|---|---|
6de9cd9a | 1 | /* Liveness for SSA trees. |
d1e082c2 | 2 | Copyright (C) 2003-2013 Free Software Foundation, Inc. |
6de9cd9a DN |
3 | Contributed by Andrew MacLeod <amacleod@redhat.com> |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9dcd6f09 | 9 | the Free Software Foundation; either version 3, or (at your option) |
6de9cd9a DN |
10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
6de9cd9a DN |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
4a8fb1a1 | 24 | #include "hash-table.h" |
6de9cd9a DN |
25 | #include "tm.h" |
26 | #include "tree.h" | |
cf835838 | 27 | #include "gimple-pretty-print.h" |
6de9cd9a | 28 | #include "bitmap.h" |
4484a35a | 29 | #include "sbitmap.h" |
442b4905 AM |
30 | #include "gimple.h" |
31 | #include "gimple-ssa.h" | |
32 | #include "tree-phinodes.h" | |
33 | #include "ssa-iterators.h" | |
34 | #include "tree-ssanames.h" | |
35 | #include "tree-dfa.h" | |
7ee2468b SB |
36 | #include "timevar.h" |
37 | #include "dumpfile.h" | |
6de9cd9a | 38 | #include "tree-ssa-live.h" |
718f9c0f | 39 | #include "diagnostic-core.h" |
aee46bbd JH |
40 | #include "debug.h" |
41 | #include "flags.h" | |
7290d709 | 42 | |
32ace6e2 AM |
43 | #ifdef ENABLE_CHECKING |
44 | static void verify_live_on_entry (tree_live_info_p); | |
45 | #endif | |
6de9cd9a | 46 | |
6de9cd9a | 47 | |
7290d709 AM |
48 | /* VARMAP maintains a mapping from SSA version number to real variables. |
49 | ||
50 | All SSA_NAMES are divided into partitions. Initially each ssa_name is the | |
51 | only member of it's own partition. Coalescing will attempt to group any | |
52 | ssa_names which occur in a copy or in a PHI node into the same partition. | |
53 | ||
54 | At the end of out-of-ssa, each partition becomes a "real" variable and is | |
55 | rewritten as a compiler variable. | |
56 | ||
fa10beec | 57 | The var_map data structure is used to manage these partitions. It allows |
7290d709 AM |
58 | partitions to be combined, and determines which partition belongs to what |
59 | ssa_name or variable, and vice versa. */ | |
60 | ||
61 | ||
4a8fb1a1 LC |
/* Hashtable helpers.  tree_int_map pairs a tree key with an unsigned int
   value; hashing and equality are keyed on the tree alone, delegating to
   the generic tree-map helpers.  Removal is a no-op because entries live
   in a caller-owned array (see var_map_base_init).  */

struct tree_int_map_hasher : typed_noop_remove <tree_int_map>
{
  typedef tree_int_map value_type;
  typedef tree_int_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a tree_int_map entry V by its tree key.  */

inline hashval_t
tree_int_map_hasher::hash (const value_type *v)
{
  return tree_map_base_hash (v);
}

/* Return true if entries V and C have the same tree key.  */

inline bool
tree_int_map_hasher::equal (const value_type *v, const compare_type *c)
{
  return tree_int_map_eq (v, c);
}
83 | ||
84 | ||
7290d709 AM |
/* This routine will initialize the basevar fields of MAP.  Each partition
   is assigned a "base" (the underlying user variable, or a canonical type
   for anonymous SSA names); partitions sharing a base get the same base
   index in map->partition_to_base_index.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part;
  tree var;
  hash_table <tree_int_map_hasher> tree_to_index;
  struct tree_int_map *m, *mapstorage;

  num_part = num_var_partitions (map);
  tree_to_index.create (num_part);
  /* We can have at most num_part entries in the hash tables, so it's
     enough to allocate so many map elements once, saving some malloc
     calls.  */
  mapstorage = m = XNEWVEC (struct tree_int_map, num_part);

  /* If a base table already exists, clear it, otherwise create it.
     (free of a NULL pointer is a no-op.)  */
  free (map->partition_to_base_index);
  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      struct tree_int_map **slot;
      unsigned baseindex;
      var = partition_to_var (map, x);
      if (SSA_NAME_VAR (var)
	  && (!VAR_P (SSA_NAME_VAR (var))
	      || !DECL_IGNORED_P (SSA_NAME_VAR (var))))
	m->base.from = SSA_NAME_VAR (var);
      else
	/* This restricts what anonymous SSA names we can coalesce
	   as it restricts the sets we compute conflicts for.
	   Using TREE_TYPE to generate sets is the easiest as
	   type equivalency also holds for SSA names with the same
	   underlying decl.

	   Check gimple_can_coalesce_p when changing this code.  */
	m->base.from = (TYPE_CANONICAL (TREE_TYPE (var))
			? TYPE_CANONICAL (TREE_TYPE (var))
			: TREE_TYPE (var));
      /* If base variable hasn't been seen, set it up.  The hash table
	 entry points into MAPSTORAGE; M is only advanced when the slot
	 was empty, so occupied entries stay stable.  */
      slot = tree_to_index.find_slot (m, INSERT);
      if (!*slot)
	{
	  baseindex = m - mapstorage;
	  m->to = baseindex;
	  *slot = m;
	  m++;
	}
      else
	baseindex = (*slot)->to;
      map->partition_to_base_index[x] = baseindex;
    }

  /* Number of distinct bases == number of storage slots consumed.  */
  map->num_basevars = m - mapstorage;

  free (mapstorage);
  tree_to_index.dispose ();
}
146 | ||
6de9cd9a | 147 | |
7290d709 | 148 | /* Remove the base table in MAP. */ |
6de9cd9a | 149 | |
7290d709 AM |
150 | static void |
151 | var_map_base_fini (var_map map) | |
152 | { | |
153 | /* Free the basevar info if it is present. */ | |
154 | if (map->partition_to_base_index != NULL) | |
155 | { | |
7290d709 AM |
156 | free (map->partition_to_base_index); |
157 | map->partition_to_base_index = NULL; | |
158 | map->num_basevars = 0; | |
159 | } | |
160 | } | |
6de9cd9a DN |
161 | /* Create a variable partition map of SIZE, initialize and return it. */ |
162 | ||
163 | var_map | |
164 | init_var_map (int size) | |
165 | { | |
166 | var_map map; | |
167 | ||
168 | map = (var_map) xmalloc (sizeof (struct _var_map)); | |
169 | map->var_partition = partition_new (size); | |
6de9cd9a | 170 | |
7290d709 AM |
171 | map->partition_to_view = NULL; |
172 | map->view_to_partition = NULL; | |
6de9cd9a DN |
173 | map->num_partitions = size; |
174 | map->partition_size = size; | |
7290d709 AM |
175 | map->num_basevars = 0; |
176 | map->partition_to_base_index = NULL; | |
6de9cd9a DN |
177 | return map; |
178 | } | |
179 | ||
180 | ||
/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  /* Drop the base-variable table first, if one was computed.  */
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  /* The view arrays may be NULL; free (NULL) is a no-op.  */
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}
192 | ||
193 | ||
b8698a0f L |
/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  Both VAR1 and
   VAR2 must be SSA_NAMEs.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  /* Translate the surviving partition into view space if a view exists.  */
  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}
226 | ||
b8698a0f L |
227 | |
228 | /* Compress the partition numbers in MAP such that they fall in the range | |
6de9cd9a DN |
229 | 0..(num_partitions-1) instead of wherever they turned out during |
230 | the partitioning exercise. This removes any references to unused | |
231 | partitions, thereby allowing bitmaps and other vectors to be much | |
b8698a0f | 232 | denser. |
6de9cd9a DN |
233 | |
234 | This is implemented such that compaction doesn't affect partitioning. | |
235 | Ie., once partitions are created and possibly merged, running one | |
236 | or more different kind of compaction will not affect the partitions | |
237 | themselves. Their index might change, but all the same variables will | |
238 | still be members of the same partition group. This allows work on reduced | |
239 | sets, and no loss of information when a larger set is later desired. | |
240 | ||
241 | In particular, coalescing can work on partitions which have 2 or more | |
242 | definitions, and then 'recompact' later to include all the single | |
243 | definitions for assignment to program variables. */ | |
244 | ||
7290d709 | 245 | |
b8698a0f L |
/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  The caller owns the returned bitmap (partition_view_fini frees
   the bitmap it is handed, so the usual pattern hands it straight on).  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view? Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  Virtual operands
     and unused default-defs are excluded from the view.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}
284 | ||
285 | ||
/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      /* 0xff bytes make every unselected entry read as -1.  */
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index, in ascending partition
	 order, and record the inverse mapping.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}
323 | ||
324 | ||
b8698a0f | 325 | /* Create a partition view which includes all the used partitions in MAP. If |
7290d709 AM |
326 | WANT_BASES is true, create the base variable map as well. */ |
327 | ||
3f9b14ff | 328 | void |
7290d709 AM |
329 | partition_view_normal (var_map map, bool want_bases) |
330 | { | |
331 | bitmap used; | |
332 | ||
333 | used = partition_view_init (map); | |
334 | partition_view_fini (map, used); | |
335 | ||
336 | if (want_bases) | |
337 | var_map_base_init (map); | |
6de9cd9a | 338 | else |
7290d709 AM |
339 | var_map_base_fini (map); |
340 | } | |
341 | ||
342 | ||
b8698a0f L |
343 | /* Create a partition view in MAP which includes just partitions which occur in |
344 | the bitmap ONLY. If WANT_BASES is true, create the base variable map | |
7290d709 AM |
345 | as well. */ |
346 | ||
3f9b14ff | 347 | void |
7290d709 AM |
348 | partition_view_bitmap (var_map map, bitmap only, bool want_bases) |
349 | { | |
350 | bitmap used; | |
351 | bitmap new_partitions = BITMAP_ALLOC (NULL); | |
352 | unsigned x, p; | |
353 | bitmap_iterator bi; | |
354 | ||
355 | used = partition_view_init (map); | |
356 | EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi) | |
6de9cd9a | 357 | { |
7290d709 AM |
358 | p = partition_find (map->var_partition, x); |
359 | gcc_assert (bitmap_bit_p (used, p)); | |
360 | bitmap_set_bit (new_partitions, p); | |
6de9cd9a | 361 | } |
7290d709 | 362 | partition_view_fini (map, new_partitions); |
6de9cd9a | 363 | |
7290d709 AM |
364 | if (want_bases) |
365 | var_map_base_init (map); | |
366 | else | |
367 | var_map_base_fini (map); | |
6de9cd9a DN |
368 | } |
369 | ||
370 | ||
/* Bitmap of DECL_UIDs of local variables referenced in the IL, built by
   the mark_all_vars_used walk and consumed by remove_unused_locals.  */
static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  /* bitmap_set_bit returns true only when the bit was newly set, which
     gives the "wasn't marked before" result for free.  */
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

/* Forward declaration; mark_all_vars_used_1 and mark_all_vars_used are
   mutually recursive.  */
static inline void mark_all_vars_used (tree *);
6de9cd9a | 391 | |
727a31fa RH |
/* Helper function for mark_all_vars_used, called via walk_tree.
   Marks scope blocks and VAR_DECLs reachable from *TP as used.
   Always returns NULL so the walk never terminates early.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  /* For an SSA name, continue the walk on its underlying decl (if any).  */
  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
	return NULL;
    }

  /* An expression carrying a scope block keeps that block alive.  */
  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
         initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t))
	mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  /* Don't walk into types or decls; they can't reference further vars.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
447 | ||
aee46bbd JH |
448 | /* Mark the scope block SCOPE and its subblocks unused when they can be |
449 | possibly eliminated if dead. */ | |
450 | ||
451 | static void | |
452 | mark_scope_block_unused (tree scope) | |
453 | { | |
454 | tree t; | |
455 | TREE_USED (scope) = false; | |
456 | if (!(*debug_hooks->ignore_block) (scope)) | |
457 | TREE_USED (scope) = true; | |
458 | for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t)) | |
459 | mark_scope_block_unused (t); | |
460 | } | |
461 | ||
/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   Block is declared dead if:
     1) No statements are associated with it.
     2) Declares no live variables
     3) All subblocks are dead
	or there is precisely one subblocks and the block
	has same abstract origin as outer block and declares
	no variables, so it is pure wrapper.
   When we are not outputting full debug info, we also eliminate dead variables
   out of scope blocks to let them to be recycled by GGC and to save copying work
   done by the inliner.  */

static bool
remove_unused_scope_block_p (tree scope)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* First prune BLOCK_VARS in place.  T is a pointer into the chain so
     entries can be unlinked by overwriting *T; NEXT is recomputed after
     an unlink so the walk revisits the new *T.  */
  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of function it was nested into was eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may get end up with
	 different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so user can't get into the scopes at first place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs. non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below, however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing saving quite
	 a lot of memory.

	 For sake of -g3, we keep around those vars but we don't count this as
	 use of block, so innermost block with no used vars and no instructions
	 can be considered dead.  We only want to keep around blocks user can
	 breakpoint into and ask about value of optimized out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  /* Recurse into subblocks.  A dead subblock with children is spliced
     out by promoting its children into this block's subblock chain,
     re-parenting them to our SUPERCONTEXT.  */
  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks ++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks ++;
      }


  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* For terse debug info we can eliminate info on unused variables.  */
  else if (debug_info_level == DINFO_LEVEL_NONE
	   || debug_info_level == DINFO_LEVEL_TERSE)
    {
      /* Even for -g0/-g1 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined function.
     Inlined functions are always represented by block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
		== UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}
7290d709 | 638 | |
b8698a0f | 639 | /* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be |
727a31fa RH |
640 | eliminated during the tree->rtl conversion process. */ |
641 | ||
642 | static inline void | |
329df070 | 643 | mark_all_vars_used (tree *expr_p) |
727a31fa | 644 | { |
329df070 | 645 | walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL); |
727a31fa RH |
646 | } |
647 | ||
5368224f DC |
648 | /* Helper function for clear_unused_block_pointer, called via walk_tree. */ |
649 | ||
650 | static tree | |
651 | clear_unused_block_pointer_1 (tree *tp, int *, void *) | |
652 | { | |
653 | if (EXPR_P (*tp) && TREE_BLOCK (*tp) | |
654 | && !TREE_USED (TREE_BLOCK (*tp))) | |
655 | TREE_SET_BLOCK (*tp, NULL); | |
5368224f DC |
656 | return NULL_TREE; |
657 | } | |
658 | ||
5d751b0c JJ |
/* Set all block pointer in debug or clobber stmt to NULL if the block
   is unused, so that they will not be streamed out.  Other statements
   are left alone because their blocks were marked used earlier.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	unsigned i;
	tree b;
	gimple stmt = gsi_stmt (gsi);

	if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	  continue;
	b = gimple_block (stmt);
	if (b && !TREE_USED (b))
	  gimple_set_block (stmt, NULL);
	/* Also scrub unused blocks referenced from the operands.  */
	for (i = 0; i < gimple_num_ops (stmt); i++)
	  walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
		     NULL, NULL);
      }
}
ac80ba07 DN |
685 | |
/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.
   Recurses into subblocks with INDENT + 2.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
  	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)": "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  /* Variables declared directly in this block.  */
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  /* Variables optimized out of nested inlined scopes.  */
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s",indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n",indent, "");
}
733 | ||
c6a21142 RG |
/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  Debugger convenience wrapper.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}
742 | ||
ac80ba07 DN |
/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  /* DECL_INITIAL of a function decl is its outermost scope block.  */
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}
3f519b35 | 752 | |
ac80ba07 DN |
/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  Debugger convenience wrapper.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}
762 | ||
3f519b35 RG |
/* Remove local variables that are not referenced in the IL.

   Works in three stages:
     1. Walk every statement, PHI and edge locus in the CFG, marking
	referenced variables (in USEDVARS) and referenced lexical
	blocks (TREE_USED).
     2. If out-of-scope clobber statements were seen, make a second
	pass removing clobbers that are the only remaining reference
	to a variable.
     3. Compact cfun->local_decls in place, dropping unmarked vars,
	then prune the scope-block tree.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  /* Debug stmts must not keep otherwise-dead vars alive.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  /* Clobbers are handled by the second pass below; just note
	     their presence here.  */
	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
	}

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  use_operand_p arg_p;
	  ssa_op_iter i;
	  tree def;
	  gimple phi = gsi_stmt (gsi);

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def);

	  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
	    {
	      tree arg = USE_FROM_PTR (arg_p);
	      int index = PHI_ARG_INDEX_FROM_USE (arg_p);
	      /* PHI arg locations can reference scope blocks too;
		 keep those blocks alive.  */
	      tree block =
		LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
	      if (block != NULL)
		TREE_USED (block) = true;
	      mark_all_vars_used (&arg);
	    }
	}

      /* Goto locations on outgoing edges may also reference blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (LOCATION_BLOCK (e->goto_locus) != NULL)
	  TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We do a two-pass approach about the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  */
  if (have_local_clobbers)
    FOR_EACH_BB (bb)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree base = get_base_address (lhs);
		/* Remove clobbers referencing unused vars, or clobbers
		   with MEM_REF lhs referencing uninitialized pointers.  */
		if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
		    || (TREE_CODE (lhs) == MEM_REF
			&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
			&& SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
			&& (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
			    != PARM_DECL)))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    /* gsi already advanced by gsi_remove.  */
		    continue;
		  }
		if (b)
		  TREE_USED (b) = true;
	      }
	    gsi_next (&gsi);
	  }
      }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  Compaction
     is done in place: DSTIDX trails SRCIDX and kept decls are copied
     down over dropped ones.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (TREE_CODE (var) == VAR_DECL)
	{
	  if (!is_used_p (var))
	    {
	      tree def;
	      /* Don't leave a dangling reference in the nonlocal goto
		 save area.  */
	      if (cfun->nonlocal_goto_save_area
		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
		cfun->nonlocal_goto_save_area = NULL;
	      /* Release any default def associated with var.  */
	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
		{
		  set_ssa_default_def (cfun, var, NULL_TREE);
		  release_ssa_name (def);
		}
	      continue;
	    }
	}
      if (TREE_CODE (var) == VAR_DECL
	  && DECL_HARD_REGISTER (var)
	  && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	(*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}
937 | ||
/* Obstack for global liveness info bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large and
   we'll hold on to all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  */
static bitmap_obstack liveness_bitmap_obstack;
6de9cd9a DN |
944 | |
945 | /* Allocate and return a new live range information object base on MAP. */ | |
946 | ||
947 | static tree_live_info_p | |
948 | new_tree_live_info (var_map map) | |
949 | { | |
950 | tree_live_info_p live; | |
3f9b14ff | 951 | basic_block bb; |
6de9cd9a | 952 | |
3f9b14ff | 953 | live = XNEW (struct tree_live_info_d); |
6de9cd9a DN |
954 | live->map = map; |
955 | live->num_blocks = last_basic_block; | |
956 | ||
3f9b14ff SB |
957 | live->livein = XNEWVEC (bitmap_head, last_basic_block); |
958 | FOR_EACH_BB (bb) | |
959 | bitmap_initialize (&live->livein[bb->index], &liveness_bitmap_obstack); | |
6de9cd9a | 960 | |
3f9b14ff SB |
961 | live->liveout = XNEWVEC (bitmap_head, last_basic_block); |
962 | FOR_EACH_BB (bb) | |
963 | bitmap_initialize (&live->liveout[bb->index], &liveness_bitmap_obstack); | |
32ace6e2 AM |
964 | |
965 | live->work_stack = XNEWVEC (int, last_basic_block); | |
966 | live->stack_top = live->work_stack; | |
967 | ||
3f9b14ff | 968 | live->global = BITMAP_ALLOC (&liveness_bitmap_obstack); |
6de9cd9a DN |
969 | return live; |
970 | } | |
971 | ||
972 | ||
973 | /* Free storage for live range info object LIVE. */ | |
974 | ||
b8698a0f | 975 | void |
6de9cd9a DN |
976 | delete_tree_live_info (tree_live_info_p live) |
977 | { | |
3f9b14ff | 978 | bitmap_obstack_release (&liveness_bitmap_obstack); |
32ace6e2 | 979 | free (live->work_stack); |
32ace6e2 | 980 | free (live->liveout); |
32ace6e2 | 981 | free (live->livein); |
32ace6e2 | 982 | free (live); |
6de9cd9a DN |
983 | } |
984 | ||
985 | ||
b8698a0f L |
/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.
   TMP is a temporary work bitmap which is passed in to avoid reallocating
   it each time.  A predecessor whose live-on-entry set grows after it was
   already visited is pushed on LIVE's work stack for revisiting.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
		 bitmap tmp)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  /* Callers must not pass a block that is still marked visited.  */
  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR)
	continue;
      /* TMP is variables live-on-entry from BB that aren't defined in the
	 predecessor block.  This should be the live on entry vars to pred.
	 Note that liveout is the DEFs in a block while live on entry is
	 being calculated.  */
      bitmap_and_compl (tmp, loe, &live->liveout[pred_bb->index]);

      /* Add these bits to live-on-entry for the pred. if there are any
	 changes, and pred_bb has been visited already, add it to the
	 revisit stack.  */
      change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
      if (bitmap_bit_p (visited, pred_bb->index) && change)
	{
	  /* Clearing the visited bit keeps the assert above happy when
	     the block is popped and revisited.  */
	  bitmap_clear_bit (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}
1028 | ||
1029 | ||
b8698a0f | 1030 | /* Using LIVE, fill in all the live-on-entry blocks between the defs and uses |
2e226e66 | 1031 | of all the variables. */ |
6de9cd9a | 1032 | |
32ace6e2 AM |
1033 | static void |
1034 | live_worklist (tree_live_info_p live) | |
6de9cd9a | 1035 | { |
32ace6e2 | 1036 | unsigned b; |
6de9cd9a | 1037 | basic_block bb; |
32ace6e2 | 1038 | sbitmap visited = sbitmap_alloc (last_basic_block + 1); |
3f9b14ff | 1039 | bitmap tmp = BITMAP_ALLOC (&liveness_bitmap_obstack); |
6de9cd9a | 1040 | |
f61e445a | 1041 | bitmap_clear (visited); |
6de9cd9a | 1042 | |
110abdbc | 1043 | /* Visit all the blocks in reverse order and propagate live on entry values |
32ace6e2 AM |
1044 | into the predecessors blocks. */ |
1045 | FOR_EACH_BB_REVERSE (bb) | |
1046 | loe_visit_block (live, bb, visited, tmp); | |
6de9cd9a | 1047 | |
32ace6e2 AM |
1048 | /* Process any blocks which require further iteration. */ |
1049 | while (live->stack_top != live->work_stack) | |
6de9cd9a | 1050 | { |
32ace6e2 AM |
1051 | b = *--(live->stack_top); |
1052 | loe_visit_block (live, BASIC_BLOCK (b), visited, tmp); | |
1053 | } | |
6de9cd9a | 1054 | |
32ace6e2 AM |
1055 | BITMAP_FREE (tmp); |
1056 | sbitmap_free (visited); | |
1057 | } | |
6de9cd9a | 1058 | |
6de9cd9a | 1059 | |
/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Def's are marked temporarily
   in the liveout vector (calculate_live_on_exit clears them later).  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    /* No defining statement: treat as defined on entry (default def).  */
    def_bb = ENTRY_BLOCK_PTR;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHI's are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether its live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (use_stmt, index);
	  if (e->src != ENTRY_BLOCK_PTR)
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	/* Debug uses must not influence liveness.  */
	continue;
      else
	{
	  /* If its not defined in this block, its live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (&live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
1132 | ||
1133 | ||
/* Calculate the live on exit vectors based on the entry info in LIVEINFO.
   A partition is live on exit from BB if it is live on entry to any
   non-exit successor, or if it feeds a PHI argument on an outgoing edge.  */

void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* live on entry calculations used liveout vectors for defs, clear them.  */
  FOR_EACH_BB (bb)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      e = gimple_phi_arg_edge (phi, i);
	      if (e->src != ENTRY_BLOCK_PTR)
		bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
	    }
	}

      /* Add each successors live on entry to this block live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR)
	  bitmap_ior_into (&liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}
1181 | ||
7290d709 | 1182 | |
b8698a0f | 1183 | /* Given partition map MAP, calculate all the live on entry bitmaps for |
32ace6e2 AM |
1184 | each partition. Return a new live info object. */ |
1185 | ||
b8698a0f | 1186 | tree_live_info_p |
32ace6e2 AM |
1187 | calculate_live_ranges (var_map map) |
1188 | { | |
1189 | tree var; | |
1190 | unsigned i; | |
1191 | tree_live_info_p live; | |
6de9cd9a | 1192 | |
3f9b14ff | 1193 | bitmap_obstack_initialize (&liveness_bitmap_obstack); |
32ace6e2 | 1194 | live = new_tree_live_info (map); |
6de9cd9a DN |
1195 | for (i = 0; i < num_var_partitions (map); i++) |
1196 | { | |
32ace6e2 AM |
1197 | var = partition_to_var (map, i); |
1198 | if (var != NULL_TREE) | |
1199 | set_var_live_on_entry (var, live); | |
6de9cd9a DN |
1200 | } |
1201 | ||
32ace6e2 AM |
1202 | live_worklist (live); |
1203 | ||
1204 | #ifdef ENABLE_CHECKING | |
1205 | verify_live_on_entry (live); | |
1206 | #endif | |
1207 | ||
1208 | calculate_live_on_exit (live); | |
1209 | return live; | |
6de9cd9a DN |
1210 | } |
1211 | ||
1212 | ||
6de9cd9a DN |
/* Output partition map MAP to file F.  For each partition (in view order
   when a view is active), list the partition's representative variable
   followed by all SSA version numbers that belong to it.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      /* Translate the view index X to the real partition number P.  */
      if (map->view_to_partition != NULL)
	p = map->view_to_partition[x];
      else
	p = x;

      if (ssa_name (p) == NULL_TREE
	  || virtual_operand_p (ssa_name (p)))
	continue;

      /* T counts members printed so far; the header is emitted lazily
	 on the first member.  Note P is reused below as each version's
	 partition.  */
      t = 0;
      for (y = 1; y < num_ssa_names; y++)
	{
	  p = partition_find (map->var_partition, y);
	  if (map->partition_to_view)
	    p = map->partition_to_view[p];
	  if (p == (int)x)
	    {
	      if (t++ == 0)
		{
		  fprintf (f, "Partition %d (", x);
		  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
		  fprintf (f, " - ");
		}
	      fprintf (f, "%d ", y);
	    }
	}
      if (t != 0)
	fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}
1257 | ||
1258 | ||
7b3b6ae4 LC |
1259 | /* Generic dump for the above. */ |
1260 | ||
1261 | DEBUG_FUNCTION void | |
1262 | debug (_var_map &ref) | |
1263 | { | |
1264 | dump_var_map (stderr, &ref); | |
1265 | } | |
1266 | ||
1267 | DEBUG_FUNCTION void | |
1268 | debug (_var_map *ptr) | |
1269 | { | |
1270 | if (ptr) | |
1271 | debug (*ptr); | |
1272 | else | |
1273 | fprintf (stderr, "<nil>\n"); | |
1274 | } | |
1275 | ||
1276 | ||
6de9cd9a DN |
/* Output live range info LIVE to file F, controlled by FLAG.
   FLAG is a mask of LIVEDUMP_ENTRY and/or LIVEDUMP_EXIT selecting which
   of the two per-block sets to print.  Partitions are printed as their
   representative variables.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB (bb)
	{
	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, " ");
	    }
	  fprintf (f, "\n");
	}
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB (bb)
	{
	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, " ");
	    }
	  fprintf (f, "\n");
	}
    }
}
1e128c5f | 1315 | |
7b3b6ae4 LC |
1316 | |
1317 | /* Generic dump for the above. */ | |
1318 | ||
1319 | DEBUG_FUNCTION void | |
1320 | debug (tree_live_info_d &ref) | |
1321 | { | |
1322 | dump_live_info (stderr, &ref, 0); | |
1323 | } | |
1324 | ||
1325 | DEBUG_FUNCTION void | |
1326 | debug (tree_live_info_d *ptr) | |
1327 | { | |
1328 | if (ptr) | |
1329 | debug (*ptr); | |
1330 | else | |
1331 | fprintf (stderr, "<nil>\n"); | |
1332 | } | |
1333 | ||
1334 | ||
1e128c5f | 1335 | #ifdef ENABLE_CHECKING |
7290d709 AM |
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  Aborts with an
   internal error if a virtual operand is being registered in a
   partition during the SSA->Normal phase.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (virtual_operand_p (ssa_var))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}
32ace6e2 AM |
1350 | |
1351 | ||
/* Verify that the info in LIVE matches the current cfg.  Checks, for
   every partition, that being live on entry to the function's first
   block(s) happens exactly for default definitions (or PHI-only uses).
   Diagnostics go to stderr; asserts if any problem was found.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR;
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR)
	continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  /* D is VAR's default def, if it has one.  */
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      /* Live on entry but defined by a real statement: looks
		 like multiple definitions.  */
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
		{
		  /* A nop def must be the default def to legitimately
		     be live on entry.  */
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
			{
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gimple_stmt_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gimple phi = gsi_stmt (gsi);
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  gcc_assert (num <= 0);
}
1e128c5f | 1454 | #endif |