4ee9c684 1/* Liveness for SSA trees.
fbd26352 2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
4ee9c684 3 Contributed by Andrew MacLeod <amacleod@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
4ee9c684 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
4ee9c684 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
9ef16211 24#include "backend.h"
7c29e30e 25#include "rtl.h"
4ee9c684 26#include "tree.h"
9ef16211 27#include "gimple.h"
7c29e30e 28#include "timevar.h"
9ef16211 29#include "ssa.h"
7c29e30e 30#include "cgraph.h"
31#include "gimple-pretty-print.h"
32#include "diagnostic-core.h"
dcf1a1ec 33#include "gimple-iterator.h"
073c1fd5 34#include "tree-dfa.h"
b9ed1410 35#include "dumpfile.h"
4ee9c684 36#include "tree-ssa-live.h"
2ff2a12b 37#include "debug.h"
4d8d655b 38#include "tree-ssa.h"
a467a47b 39#include "ipa-utils.h"
6c01cf92 40#include "cfgloop.h"
30a86690 41#include "stringpool.h"
42#include "attribs.h"
9dcf2a11 43#include "optinfo.h"
91b30299 44#include "gimple-walk.h"
45#include "cfganal.h"
2d043327 46
382ecba7 47static void verify_live_on_entry (tree_live_info_p);
4ee9c684 48
4ee9c684 49
2d043327 50/* VARMAP maintains a mapping from SSA version number to real variables.
51
52 All SSA_NAMES are divided into partitions. Initially each ssa_name is the
53 only member of its own partition. Coalescing will attempt to group any
54 ssa_names which occur in a copy or in a PHI node into the same partition.
55
56 At the end of out-of-ssa, each partition becomes a "real" variable and is
57 rewritten as a compiler variable.
58
f0b5f617 59 The var_map data structure is used to manage these partitions. It allows
2d043327 60 partitions to be combined, and determines which partition belongs to what
61 ssa_name or variable, and vice versa. */
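
/* A minimal usage sketch (hypothetical client code; the usual out-of-ssa
   client is tree-ssa-coalesce.c):

     var_map map = init_var_map (num_ssa_names, NULL);
     ...
     var_union (map, name1, name2);    <- coalesce two SSA_NAMEs
     partition_view_normal (map);      <- compact the view to used partitions
     ... rewrite each partition as a variable ...
     delete_var_map (map);

   NAME1 and NAME2 stand for SSA_NAMEs occurring together in a copy or PHI;
   this is an illustration only, not the exact call sequence of any pass.  */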
62
63
2d043327 64/* Remove the base table in MAP. */
4ee9c684 65
2d043327 66static void
67var_map_base_fini (var_map map)
68{
69 /* Free the basevar info if it is present. */
70 if (map->partition_to_base_index != NULL)
71 {
2d043327 72 free (map->partition_to_base_index);
73 map->partition_to_base_index = NULL;
74 map->num_basevars = 0;
75 }
76}
74bfe107 77/* Create a variable partition map of SIZE for a region, initialize and
78 return it.  The region is the loop LOOP if LOOP is non-NULL; otherwise it
79 is the current function. */
4ee9c684 80
81var_map
2e966e2a 82init_var_map (int size, class loop *loop)
4ee9c684 83{
84 var_map map;
85
86 map = (var_map) xmalloc (sizeof (struct _var_map));
87 map->var_partition = partition_new (size);
4ee9c684 88
2d043327 89 map->partition_to_view = NULL;
90 map->view_to_partition = NULL;
4ee9c684 91 map->num_partitions = size;
92 map->partition_size = size;
2d043327 93 map->num_basevars = 0;
94 map->partition_to_base_index = NULL;
74bfe107 95 map->vec_bbs = vNULL;
96 if (loop)
97 {
98 map->bmp_bbs = BITMAP_ALLOC (NULL);
99 map->outofssa_p = false;
100 basic_block *bbs = get_loop_body_in_dom_order (loop);
101 for (unsigned i = 0; i < loop->num_nodes; ++i)
102 {
103 bitmap_set_bit (map->bmp_bbs, bbs[i]->index);
104 map->vec_bbs.safe_push (bbs[i]);
105 }
106 free (bbs);
107 }
108 else
109 {
110 map->bmp_bbs = NULL;
111 map->outofssa_p = true;
112 basic_block bb;
113 FOR_EACH_BB_FN (bb, cfun)
114 map->vec_bbs.safe_push (bb);
115 }
4ee9c684 116 return map;
117}
118
119
120/* Free memory associated with MAP. */
121
122void
123delete_var_map (var_map map)
124{
2d043327 125 var_map_base_fini (map);
4ee9c684 126 partition_delete (map->var_partition);
dd045aee 127 free (map->partition_to_view);
128 free (map->view_to_partition);
74bfe107 129 if (map->bmp_bbs)
130 BITMAP_FREE (map->bmp_bbs);
131 map->vec_bbs.release ();
4ee9c684 132 free (map);
133}
134
135
48e1416a 136/* This function will combine the partitions in MAP for VAR1 and VAR2. It
137 returns the partition which represents the new partition. If the two
0bed3869 138 partitions cannot be combined, NO_PARTITION is returned. */
4ee9c684 139
140int
141var_union (var_map map, tree var1, tree var2)
142{
143 int p1, p2, p3;
a8dd994c 144
145 gcc_assert (TREE_CODE (var1) == SSA_NAME);
146 gcc_assert (TREE_CODE (var2) == SSA_NAME);
4ee9c684 147
48e1416a 148 /* This is independent of partition_to_view. If partition_to_view is
4ee9c684 149 on, then whichever one of these partitions is absorbed will never have a
2d043327 150 dereference into the partition_to_view array any more. */
4ee9c684 151
a8dd994c 152 p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
153 p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));
4ee9c684 154
8c0963c4 155 gcc_assert (p1 != NO_PARTITION);
156 gcc_assert (p2 != NO_PARTITION);
4ee9c684 157
158 if (p1 == p2)
159 p3 = p1;
160 else
161 p3 = partition_union (map->var_partition, p1, p2);
162
2d043327 163 if (map->partition_to_view)
164 p3 = map->partition_to_view[p3];
4ee9c684 165
4ee9c684 166 return p3;
167}
168
48e1416a 169
170/* Compress the partition numbers in MAP such that they fall in the range
4ee9c684 171 0..(num_partitions-1) instead of wherever they turned out during
172 the partitioning exercise. This removes any references to unused
173 partitions, thereby allowing bitmaps and other vectors to be much
48e1416a 174 denser.
4ee9c684 175
176 This is implemented such that compaction doesn't affect partitioning.
177 I.e., once partitions are created and possibly merged, running one
178 or more different kinds of compaction will not affect the partitions
179 themselves. Their index might change, but all the same variables will
180 still be members of the same partition group. This allows work on reduced
181 sets, and no loss of information when a larger set is later desired.
182
183 In particular, coalescing can work on partitions which have 2 or more
184 definitions, and then 'recompact' later to include all the single
185 definitions for assignment to program variables. */
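
/* A sketch of that flow (hypothetical, for illustration only):

     bitmap interesting = ...;                 <- partitions with >= 2 defs
     partition_view_bitmap (map, interesting); <- work on the reduced view
     ... coalesce ...
     partition_view_normal (map);              <- recompact to all used ones
*/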
186
2d043327 187
48e1416a 188/* Set MAP back to the initial state of having no partition view. Return a
189 bitmap which has a bit set for each partition number which is in use in the
2d043327 190 varmap. */
191
192static bitmap
193partition_view_init (var_map map)
4ee9c684 194{
2d043327 195 bitmap used;
196 int tmp;
197 unsigned int x;
4ee9c684 198
2d043327 199 used = BITMAP_ALLOC (NULL);
4ee9c684 200
2d043327 201 /* Already in a view? Abandon the old one. */
202 if (map->partition_to_view)
4ee9c684 203 {
2d043327 204 free (map->partition_to_view);
205 map->partition_to_view = NULL;
4ee9c684 206 }
2d043327 207 if (map->view_to_partition)
4ee9c684 208 {
2d043327 209 free (map->view_to_partition);
210 map->view_to_partition = NULL;
4ee9c684 211 }
212
4ee9c684 213 /* Find out which partitions are actually referenced. */
2d043327 214 for (x = 0; x < map->partition_size; x++)
4ee9c684 215 {
216 tmp = partition_find (map->var_partition, x);
7c782c9b 217 if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
a8dd994c 218 && (!has_zero_uses (ssa_name (tmp))
b2df3bbf 219 || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))
220 || (SSA_NAME_VAR (ssa_name (tmp))
221 && !VAR_P (SSA_NAME_VAR (ssa_name (tmp))))))
2d043327 222 bitmap_set_bit (used, tmp);
4ee9c684 223 }
224
2d043327 225 map->num_partitions = map->partition_size;
226 return used;
227}
228
229
230/* This routine will finalize the view data for MAP based on the partitions
48e1416a 231 set in SELECTED. This is either the same bitmap returned from
2d043327 232 partition_view_init, or a trimmed down version if some of those partitions
233 were not desired in this view. SELECTED is freed before returning. */
234
48e1416a 235static void
2d043327 236partition_view_fini (var_map map, bitmap selected)
237{
238 bitmap_iterator bi;
239 unsigned count, i, x, limit;
2d043327 240
241 gcc_assert (selected);
242
243 count = bitmap_count_bits (selected);
244 limit = map->partition_size;
245
246 /* If it's a one-to-one ratio, we don't need any view compaction. */
247 if (count < limit)
4ee9c684 248 {
2d043327 249 map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
250 memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
251 map->view_to_partition = (int *)xmalloc (count * sizeof (int));
3e790786 252
2d043327 253 i = 0;
254 /* Give each selected partition an index. */
255 EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
4ee9c684 256 {
2d043327 257 map->partition_to_view[x] = i;
258 map->view_to_partition[i] = x;
2d043327 259 i++;
3e790786 260 }
2d043327 261 gcc_assert (i == count);
262 map->num_partitions = i;
4ee9c684 263 }
2d043327 264
265 BITMAP_FREE (selected);
266}
267
268
94f92c36 269/* Create a partition view which includes all the used partitions in MAP. */
2d043327 270
4fb07d00 271void
94f92c36 272partition_view_normal (var_map map)
2d043327 273{
274 bitmap used;
275
276 used = partition_view_init (map);
277 partition_view_fini (map, used);
278
94f92c36 279 var_map_base_fini (map);
2d043327 280}
281
282
48e1416a 283/* Create a partition view in MAP which includes just the partitions which
284 occur in the bitmap ONLY. */
286
4fb07d00 287void
94f92c36 288partition_view_bitmap (var_map map, bitmap only)
2d043327 289{
290 bitmap used;
291 bitmap new_partitions = BITMAP_ALLOC (NULL);
292 unsigned x, p;
293 bitmap_iterator bi;
294
295 used = partition_view_init (map);
296 EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
4ee9c684 297 {
2d043327 298 p = partition_find (map->var_partition, x);
299 gcc_assert (bitmap_bit_p (used, p));
300 bitmap_set_bit (new_partitions, p);
4ee9c684 301 }
2d043327 302 partition_view_fini (map, new_partitions);
4ee9c684 303
94f92c36 304 var_map_base_fini (map);
4ee9c684 305}
306
307
4ae5778c 308static bitmap usedvars;
309
920bd157 310/* Mark VAR as used, so that it'll be preserved during rtl expansion.
311 Returns true if VAR wasn't marked before. */
4ae5778c 312
920bd157 313static inline bool
4ae5778c 314set_is_used (tree var)
315{
920bd157 316 return bitmap_set_bit (usedvars, DECL_UID (var));
4ae5778c 317}
318
319/* Return true if VAR is marked as used. */
320
321static inline bool
322is_used_p (tree var)
323{
324 return bitmap_bit_p (usedvars, DECL_UID (var));
325}
326
920bd157 327static inline void mark_all_vars_used (tree *);
4ee9c684 328
280450fa 329/* Helper function for mark_all_vars_used, called via walk_tree. */
330
331static tree
920bd157 332mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
280450fa 333{
334 tree t = *tp;
2ff2a12b 335 enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
336 tree b;
280450fa 337
db22d3cc 338 if (TREE_CODE (t) == SSA_NAME)
ec11736b 339 {
340 *walk_subtrees = 0;
341 t = SSA_NAME_VAR (t);
342 if (!t)
343 return NULL;
344 }
75a70cf9 345
346 if (IS_EXPR_CODE_CLASS (c)
2ff2a12b 347 && (b = TREE_BLOCK (t)) != NULL)
348 TREE_USED (b) = true;
db22d3cc 349
28daba6f 350 /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
351 fields do not contain vars. */
b06d1934 352 if (TREE_CODE (t) == TARGET_MEM_REF)
353 {
920bd157 354 mark_all_vars_used (&TMR_BASE (t));
355 mark_all_vars_used (&TMR_INDEX (t));
356 mark_all_vars_used (&TMR_INDEX2 (t));
b06d1934 357 *walk_subtrees = 0;
358 return NULL;
359 }
360
280450fa 361 /* Only need to mark VAR_DECLS; parameters and return results are not
362 eliminated as unused. */
53e9c5c4 363 if (VAR_P (t))
a93b21ea 364 {
920bd157 365 /* When a global var becomes used for the first time also walk its
366 initializer (non global ones don't have any). */
c8a9710d 367 if (set_is_used (t) && is_global_var (t)
368 && DECL_CONTEXT (t) == current_function_decl)
920bd157 369 mark_all_vars_used (&DECL_INITIAL (t));
a93b21ea 370 }
6ceec668 371 /* remove_unused_scope_block_p requires information about labels
372 which are not DECL_IGNORED_P to tell if they might be used in the IL. */
ad75582e 373 else if (TREE_CODE (t) == LABEL_DECL)
6ceec668 374 /* Although the TREE_USED values that the frontend uses would be
375 acceptable (albeit slightly over-conservative) for our purposes,
376 init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
377 must re-compute it here. */
378 TREE_USED (t) = 1;
280450fa 379
ce45a448 380 if (IS_TYPE_OR_DECL_P (t))
280450fa 381 *walk_subtrees = 0;
382
383 return NULL;
384}
385
2ff2a12b 386/* Mark the scope block SCOPE and its subblocks unused, so that they can
387 possibly be eliminated if dead. */
388
389static void
390mark_scope_block_unused (tree scope)
391{
392 tree t;
393 TREE_USED (scope) = false;
394 if (!(*debug_hooks->ignore_block) (scope))
395 TREE_USED (scope) = true;
396 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
397 mark_scope_block_unused (t);
398}
399
400/* Check whether the block is dead (by possibly eliminating its dead subblocks)
48e1416a 401 and return true if so.
2ff2a12b 402 A block is declared dead if:
403 1) No statements are associated with it.
404 2) It declares no live variables.
405 3) All subblocks are dead,
406 or there is precisely one subblock and the block
407 has the same abstract origin as the outer block and declares
408 no variables, so it is a pure wrapper.
4a7e4fcc 409 When we are not outputting full debug info, we also eliminate dead variables
2ff2a12b 410 out of scope blocks to let them be recycled by GGC and to save copying work
411 done by the inliner. */
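
/* For instance, in a function body such as

       void f (void)
       {
         { }                        <- innermost, no stmts/vars: removed
         { int x = g (); h (x); }   <- statements and a used variable: kept
       }

   (an illustrative sketch only; as described above, the real decision also
   depends on the debug info level, inlining and abstract origins).  */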
412
413static bool
a467a47b 414remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
2ff2a12b 415{
8c796c7c 416 tree *t, *next;
2ff2a12b 417 bool unused = !TREE_USED (scope);
2ff2a12b 418 int nsubblocks = 0;
419
a467a47b 420 /* For ipa-polymorphic-call.c purposes, preserve blocks:
421 1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones */
422 if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
423 {
424 in_ctor_dtor_block = true;
425 unused = false;
426 }
402d7a90 427 /* 2) inside such blocks, the outermost block with block_ultimate_origin
a467a47b 428 being a FUNCTION_DECL. */
402d7a90 429 else if (in_ctor_dtor_block)
a467a47b 430 {
402d7a90 431 tree fn = block_ultimate_origin (scope);
432 if (fn && TREE_CODE (fn) == FUNCTION_DECL)
433 {
434 in_ctor_dtor_block = false;
435 unused = false;
436 }
a467a47b 437 }
438
8c796c7c 439 for (t = &BLOCK_VARS (scope); *t; t = next)
440 {
1767a056 441 next = &DECL_CHAIN (*t);
8c796c7c 442
443 /* Debug info of a nested function refers to the block of the
36267649 444 function.  We might still call it even if all statements
445 of the function it was nested into were eliminated.
48e1416a 446
36267649 447 TODO: We can actually look into the cgraph to see if the function
448 will be output to a file. */
8c796c7c 449 if (TREE_CODE (*t) == FUNCTION_DECL)
450 unused = false;
ee093c13 451
452 /* If a decl has a value expr, we need to instantiate it
453 regardless of debug info generation, to avoid codegen
454 differences in memory overlap tests. update_equiv_regs() may
455 indirectly call validate_equiv_mem() to test whether a
456 SET_DEST overlaps with others, and if the value expr changes
457 by virtual register instantiation, we may end up with
458 different results. */
53e9c5c4 459 else if (VAR_P (*t) && DECL_HAS_VALUE_EXPR_P (*t))
ee093c13 460 unused = false;
461
25db41e9 462 /* Remove everything we don't generate debug info for. */
463 else if (DECL_IGNORED_P (*t))
64f6c8c9 464 {
1767a056 465 *t = DECL_CHAIN (*t);
64f6c8c9 466 next = t;
467 }
468
8c796c7c 469 /* When we are outputting debug info, we usually want to output
470 info about optimized-out variables in the scope blocks.
471 The exception is scope blocks not containing any instructions
472 at all, so the user can't get into the scopes in the first place. */
920bd157 473 else if (is_used_p (*t))
8c796c7c 474 unused = false;
6ceec668 475 else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
476 /* For labels that are still used in the IL, the decision to
477 preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
478 risk having different ordering in debug vs. non-debug builds
479 during inlining or versioning.
480 A label appearing here (we have already checked DECL_IGNORED_P)
481 should not be used in the IL unless it has been explicitly used
482 before, so we use TREE_USED as an approximation. */
483 /* In principle, we should do the same here as for the debug case
484 below, however, when debugging, there might be additional nested
485 levels that keep an upper level with a label live, so we have to
486 force this block to be considered used, too. */
487 unused = false;
8c796c7c 488
489 /* When we are not doing full debug info, we can however keep around
490 only the used variables for cfgexpand's memory packing, saving quite
48e1416a 491 a lot of memory.
36267649 492
493 For the sake of -g3, we keep around those vars but we don't count this as
494 a use of the block, so an innermost block with no used vars and no
495 instructions can be considered dead.  We only want to keep around blocks
48e1416a 496 the user can breakpoint into and ask about the value of optimized-out variables.
36267649 497
29bcbc13 498 Similarly we need to keep around types at least until all
499 variables of all nested blocks are gone. We track no
500 information on whether a given type is used or not, so we have
501 to keep them even when not emitting debug information,
502 otherwise we may end up remapping variables and their (local)
503 types in different orders depending on whether debug
504 information is being generated. */
505
506 else if (TREE_CODE (*t) == TYPE_DECL
507 || debug_info_level == DINFO_LEVEL_NORMAL
45b2f957 508 || debug_info_level == DINFO_LEVEL_VERBOSE)
36267649 509 ;
7f481d3e 510 else
8c796c7c 511 {
1767a056 512 *t = DECL_CHAIN (*t);
8c796c7c 513 next = t;
514 }
515 }
516
2ff2a12b 517 for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
a467a47b 518 if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
2ff2a12b 519 {
520 if (BLOCK_SUBBLOCKS (*t))
521 {
522 tree next = BLOCK_CHAIN (*t);
523 tree supercontext = BLOCK_SUPERCONTEXT (*t);
cee43f7e 524
2ff2a12b 525 *t = BLOCK_SUBBLOCKS (*t);
cee43f7e 526 while (BLOCK_CHAIN (*t))
527 {
528 BLOCK_SUPERCONTEXT (*t) = supercontext;
529 t = &BLOCK_CHAIN (*t);
530 }
2ff2a12b 531 BLOCK_CHAIN (*t) = next;
532 BLOCK_SUPERCONTEXT (*t) = supercontext;
533 t = &BLOCK_CHAIN (*t);
534 nsubblocks ++;
535 }
536 else
36267649 537 *t = BLOCK_CHAIN (*t);
2ff2a12b 538 }
539 else
540 {
541 t = &BLOCK_CHAIN (*t);
542 nsubblocks ++;
543 }
cee43f7e 544
545
546 if (!unused)
547 ;
2ff2a12b 548 /* Outer scope is always used. */
cee43f7e 549 else if (!BLOCK_SUPERCONTEXT (scope)
550 || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
2ff2a12b 551 unused = false;
cee43f7e 552 /* Innermost blocks with no live variables nor statements can always be
553 eliminated. */
554 else if (!nsubblocks)
555 ;
7fa9fa16 556 /* When not generating debug info we can eliminate info on unused
557 variables. */
9dcf2a11 558 else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE
559 && !optinfo_wants_inlining_info_p ())
f63d3ecc 560 {
7fa9fa16 561 /* Even for -g0 don't prune outer scopes from artificial
f63d3ecc 562 functions, otherwise diagnostics using tree_nonartificial_location
563 will not be emitted properly. */
564 if (inlined_function_outer_scope_p (scope))
565 {
0e59d2fb 566 tree ao = BLOCK_ORIGIN (scope);
f63d3ecc 567 if (ao
568 && TREE_CODE (ao) == FUNCTION_DECL
569 && DECL_DECLARED_INLINE_P (ao)
570 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
571 unused = false;
572 }
573 }
4b5d70fd 574 else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
2ff2a12b 575 unused = false;
8f6f3638 576 /* See if this block is important for the representation of an inlined
577 function.  Inlined functions are always represented by a block
578 with block_ultimate_origin being set to FUNCTION_DECL and
1d74f6b1 579 DECL_SOURCE_LOCATION set, unless they expand to nothing... */
580 else if (inlined_function_outer_scope_p (scope))
cee43f7e 581 unused = false;
582 else
583 /* Verify that only blocks with source location set
584 are entry points to the inlined functions. */
8e7408e3 585 gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
586 == UNKNOWN_LOCATION);
b26d0c9e 587
588 TREE_USED (scope) = !unused;
2ff2a12b 589 return unused;
590}
2d043327 591
48e1416a 592/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
280450fa 593 eliminated during the tree->rtl conversion process. */
594
595static inline void
920bd157 596mark_all_vars_used (tree *expr_p)
280450fa 597{
920bd157 598 walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
280450fa 599}
600
5169661d 601/* Helper function for clear_unused_block_pointer, called via walk_tree. */
602
603static tree
604clear_unused_block_pointer_1 (tree *tp, int *, void *)
605{
606 if (EXPR_P (*tp) && TREE_BLOCK (*tp)
607 && !TREE_USED (TREE_BLOCK (*tp)))
608 TREE_SET_BLOCK (*tp, NULL);
5169661d 609 return NULL_TREE;
610}
611
9f559b20 612/* Set all block pointer in debug or clobber stmt to NULL if the block
613 is unused, so that they will not be streamed out. */
5169661d 614
615static void
f1ff4562 616clear_unused_block_pointer (void)
5169661d 617{
618 basic_block bb;
619 gimple_stmt_iterator gsi;
1aa7a266 620
fc00614f 621 FOR_EACH_BB_FN (bb, cfun)
5169661d 622 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
623 {
624 unsigned i;
625 tree b;
42acab1c 626 gimple *stmt = gsi_stmt (gsi);
5169661d 627
9f559b20 628 if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
5169661d 629 continue;
630 b = gimple_block (stmt);
631 if (b && !TREE_USED (b))
632 gimple_set_block (stmt, NULL);
633 for (i = 0; i < gimple_num_ops (stmt); i++)
634 walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
635 NULL, NULL);
636 }
637}
b79917fd 638
639/* Dump scope blocks starting at SCOPE to FILE. INDENT is the
640 indentation level and FLAGS is as in print_generic_expr. */
36267649 641
642static void
3f6e5ced 643dump_scope_block (FILE *file, int indent, tree scope, dump_flags_t flags)
36267649 644{
645 tree var, t;
4b5d70fd 646 unsigned int i;
36267649 647
41f70262 648 fprintf (file, "\n%*s{ Scope block #%i%s",indent, "" , BLOCK_NUMBER (scope),
649 TREE_USED (scope) ? "" : " (unused)");
8e7408e3 650 if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
cee43f7e 651 {
652 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
653 fprintf (file, " %s:%i", s.file, s.line);
654 }
655 if (BLOCK_ABSTRACT_ORIGIN (scope))
36267649 656 {
cee43f7e 657 tree origin = block_ultimate_origin (scope);
658 if (origin)
659 {
660 fprintf (file, " Originating from :");
661 if (DECL_P (origin))
662 print_generic_decl (file, origin, flags);
663 else
664 fprintf (file, "#%i", BLOCK_NUMBER (origin));
665 }
36267649 666 }
8f6f3638 667 if (BLOCK_FRAGMENT_ORIGIN (scope))
668 fprintf (file, " Fragment of : #%i",
669 BLOCK_NUMBER (BLOCK_FRAGMENT_ORIGIN (scope)));
670 else if (BLOCK_FRAGMENT_CHAIN (scope))
671 {
672 fprintf (file, " Fragment chain :");
673 for (t = BLOCK_FRAGMENT_CHAIN (scope); t ;
674 t = BLOCK_FRAGMENT_CHAIN (t))
675 fprintf (file, " #%i", BLOCK_NUMBER (t));
676 }
cee43f7e 677 fprintf (file, " \n");
1767a056 678 for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
36267649 679 {
f665f7bb 680 fprintf (file, "%*s", indent, "");
36267649 681 print_generic_decl (file, var, flags);
4ae5778c 682 fprintf (file, "\n");
36267649 683 }
4b5d70fd 684 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
685 {
686 fprintf (file, "%*s",indent, "");
687 print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
688 flags);
689 fprintf (file, " (nonlocalized)\n");
690 }
36267649 691 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
692 dump_scope_block (file, indent + 2, t, flags);
cee43f7e 693 fprintf (file, "\n%*s}\n",indent, "");
36267649 694}
695
7496b609 696/* Dump the tree of lexical scopes starting at SCOPE to stderr. FLAGS
697 is as in print_generic_expr. */
698
4b987fac 699DEBUG_FUNCTION void
3f6e5ced 700debug_scope_block (tree scope, dump_flags_t flags)
7496b609 701{
702 dump_scope_block (stderr, 0, scope, flags);
703}
704
b79917fd 705
706/* Dump the tree of lexical scopes of current_function_decl to FILE.
707 FLAGS is as in print_generic_expr. */
708
cee43f7e 709void
3f6e5ced 710dump_scope_blocks (FILE *file, dump_flags_t flags)
cee43f7e 711{
712 dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
713}
db22d3cc 714
b79917fd 715
716/* Dump the tree of lexical scopes of current_function_decl to stderr.
717 FLAGS is as in print_generic_expr. */
718
4b987fac 719DEBUG_FUNCTION void
3f6e5ced 720debug_scope_blocks (dump_flags_t flags)
b79917fd 721{
722 dump_scope_blocks (stderr, flags);
723}
724
db22d3cc 725/* Remove local variables that are not referenced in the IL. */
726
727void
728remove_unused_locals (void)
729{
730 basic_block bb;
b03e5397 731 tree var;
920bd157 732 unsigned srcidx, dstidx, num;
3c25489e 733 bool have_local_clobbers = false;
db22d3cc 734
fe5c69b7 735 /* Removing declarations from lexical blocks when not optimizing is
736 not only a waste of time, it actually causes differences in stack
737 layout. */
738 if (!optimize)
739 return;
740
4b366dd3 741 timevar_push (TV_REMOVE_UNUSED);
742
36267649 743 mark_scope_block_unused (DECL_INITIAL (current_function_decl));
75a70cf9 744
4ae5778c 745 usedvars = BITMAP_ALLOC (NULL);
db22d3cc 746
747 /* Walk the CFG marking all referenced symbols. */
fc00614f 748 FOR_EACH_BB_FN (bb, cfun)
db22d3cc 749 {
75a70cf9 750 gimple_stmt_iterator gsi;
751 size_t i;
32dedf8f 752 edge_iterator ei;
753 edge e;
db22d3cc 754
755 /* Walk the statements. */
75a70cf9 756 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
757 {
42acab1c 758 gimple *stmt = gsi_stmt (gsi);
75a70cf9 759 tree b = gimple_block (stmt);
760
1d74f6b1 761 /* If we wanted to mark the block referenced by the inline
762 entry point marker as used, this would be a good spot to
763 do it. If the block is not otherwise used, the stmt will
764 be cleaned up in clear_unused_block_pointer. */
9845d120 765 if (is_gimple_debug (stmt))
766 continue;
767
3c25489e 768 if (gimple_clobber_p (stmt))
769 {
770 have_local_clobbers = true;
771 continue;
772 }
773
75a70cf9 774 if (b)
775 TREE_USED (b) = true;
db22d3cc 776
75a70cf9 777 for (i = 0; i < gimple_num_ops (stmt); i++)
920bd157 778 mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
75a70cf9 779 }
780
1a91d914 781 for (gphi_iterator gpi = gsi_start_phis (bb);
782 !gsi_end_p (gpi);
783 gsi_next (&gpi))
db22d3cc 784 {
785 use_operand_p arg_p;
786 ssa_op_iter i;
75a70cf9 787 tree def;
1a91d914 788 gphi *phi = gpi.phi ();
db22d3cc 789
7c782c9b 790 if (virtual_operand_p (gimple_phi_result (phi)))
db22d3cc 791 continue;
792
75a70cf9 793 def = gimple_phi_result (phi);
920bd157 794 mark_all_vars_used (&def);
db22d3cc 795
796 FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
797 {
798 tree arg = USE_FROM_PTR (arg_p);
5169661d 799 int index = PHI_ARG_INDEX_FROM_USE (arg_p);
800 tree block =
801 LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
802 if (block != NULL)
803 TREE_USED (block) = true;
920bd157 804 mark_all_vars_used (&arg);
db22d3cc 805 }
806 }
32dedf8f 807
808 FOR_EACH_EDGE (e, ei, bb->succs)
f1ff4562 809 if (LOCATION_BLOCK (e->goto_locus) != NULL)
5169661d 810 TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
db22d3cc 811 }
812
3c25489e 813 /* We do a two-pass approach for the out-of-scope clobbers.  We want
814 to remove them if they are the only references to a local variable,
815 but we want to retain them when there is any other reference.  So the first pass
816 ignores them, and the second pass (if there were any) tries to remove
817 them. */
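  /* A "clobber" here is a gimple assignment printed in dumps roughly as
     "x ={v} {CLOBBER};"; it marks the end of X's lifetime rather than a
     real store.  The removal test is in the loop below.  */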
818 if (have_local_clobbers)
fc00614f 819 FOR_EACH_BB_FN (bb, cfun)
3c25489e 820 {
821 gimple_stmt_iterator gsi;
822
823 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
824 {
42acab1c 825 gimple *stmt = gsi_stmt (gsi);
3c25489e 826 tree b = gimple_block (stmt);
827
828 if (gimple_clobber_p (stmt))
829 {
830 tree lhs = gimple_assign_lhs (stmt);
9f559b20 831 tree base = get_base_address (lhs);
832 /* Remove clobbers referencing unused vars, or clobbers
833 with MEM_REF lhs referencing uninitialized pointers. */
53e9c5c4 834 if ((VAR_P (base) && !is_used_p (base))
9f559b20 835 || (TREE_CODE (lhs) == MEM_REF
836 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
837 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
838 && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
839 != PARM_DECL)))
3c25489e 840 {
841 unlink_stmt_vdef (stmt);
842 gsi_remove (&gsi, true);
843 release_defs (stmt);
844 continue;
845 }
846 if (b)
847 TREE_USED (b) = true;
848 }
849 gsi_next (&gsi);
850 }
851 }
852
6c01cf92 853 if (cfun->has_simduid_loops)
854 {
2e966e2a 855 class loop *loop;
6c01cf92 856 FOR_EACH_LOOP (loop, 0)
857 if (loop->simduid && !is_used_p (loop->simduid))
858 loop->simduid = NULL_TREE;
859 }
860
e15deb4b 861 cfun->has_local_explicit_reg_vars = false;
862
920bd157 863 /* Remove unmarked local and global vars from local_decls. */
f1f41a6c 864 num = vec_safe_length (cfun->local_decls);
501bdd19 865 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
db22d3cc 866 {
f1f41a6c 867 var = (*cfun->local_decls)[srcidx];
53e9c5c4 868 if (VAR_P (var))
db22d3cc 869 {
920bd157 870 if (!is_used_p (var))
ad75582e 871 {
1ba198c0 872 tree def;
7843e4bc 873 if (cfun->nonlocal_goto_save_area
874 && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
875 cfun->nonlocal_goto_save_area = NULL;
1ba198c0 876 /* Release any default def associated with var. */
877 if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
878 {
879 set_ssa_default_def (cfun, var, NULL_TREE);
880 release_ssa_name (def);
881 }
ad75582e 882 continue;
a93b21ea 883 }
db22d3cc 884 }
53e9c5c4 885 if (VAR_P (var) && DECL_HARD_REGISTER (var) && !is_global_var (var))
e15deb4b 886 cfun->has_local_explicit_reg_vars = true;
2ab2ce89 887
501bdd19 888 if (srcidx != dstidx)
f1f41a6c 889 (*cfun->local_decls)[dstidx] = var;
501bdd19 890 dstidx++;
db22d3cc 891 }
501bdd19 892 if (dstidx != num)
560965e9 893 {
894 statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
895 cfun->local_decls->truncate (dstidx);
896 }
2887c015 897
402d7a90 898 remove_unused_scope_block_p (DECL_INITIAL (current_function_decl),
899 polymorphic_ctor_dtor_p (current_function_decl,
900 true) != NULL_TREE);
5169661d 901 clear_unused_block_pointer ();
2887c015 902
4ae5778c 903 BITMAP_FREE (usedvars);
fe15b701 904
36267649 905 if (dump_file && (dump_flags & TDF_DETAILS))
906 {
907 fprintf (dump_file, "Scope blocks after cleanups:\n");
cee43f7e 908 dump_scope_blocks (dump_file, dump_flags);
36267649 909 }
4b366dd3 910
911 timevar_pop (TV_REMOVE_UNUSED);
db22d3cc 912}
913
4ee9c684 914/* Allocate and return a new live range information object based on MAP. */
915
916static tree_live_info_p
917new_tree_live_info (var_map map)
918{
919 tree_live_info_p live;
4fb07d00 920 basic_block bb;
4ee9c684 921
4fb07d00 922 live = XNEW (struct tree_live_info_d);
4ee9c684 923 live->map = map;
fe672ac0 924 live->num_blocks = last_basic_block_for_fn (cfun);
4ee9c684 925
78775228 926 bitmap_obstack_initialize (&live->livein_obstack);
927 bitmap_obstack_initialize (&live->liveout_obstack);
4ee9c684 928
74bfe107 929 live->livein = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
930 live->liveout = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
931 for (unsigned i = 0; map->vec_bbs.iterate (i, &bb); ++i)
932 {
933 bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);
934 bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);
935 }
30928c44 936
fe672ac0 937 live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
30928c44 938 live->stack_top = live->work_stack;
939
78775228 940 live->global = BITMAP_ALLOC (NULL);
4ee9c684 941 return live;
942}
943
944
945/* Free storage for live range info object LIVE. */
946
48e1416a 947void
4ee9c684 948delete_tree_live_info (tree_live_info_p live)
949{
78775228 950 if (live->livein)
951 {
952 bitmap_obstack_release (&live->livein_obstack);
953 free (live->livein);
954 }
955 if (live->liveout)
956 {
957 bitmap_obstack_release (&live->liveout_obstack);
958 free (live->liveout);
959 }
960 BITMAP_FREE (live->global);
30928c44 961 free (live->work_stack);
30928c44 962 free (live);
4ee9c684 963}
964
965
48e1416a 966/* Visit basic block BB and propagate any required live on entry bits from
967 LIVE into the predecessors.  VISITED is the bitmap of visited blocks. */
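
/* In bitmap terms, the body below computes, for each predecessor P of BB
   that lies in the region:

     live-on-entry (P) |= live-on-entry (BB) & ~liveout (P)

   where liveout (P) at this point still holds the names defined in P (see
   set_var_live_on_entry).  If that changes an already-visited P, P is put
   back on the work stack for another pass.  */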
4ee9c684 970
48e1416a 971static void
78775228 972loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
4ee9c684 973{
30928c44 974 edge e;
975 bool change;
976 edge_iterator ei;
977 basic_block pred_bb;
978 bitmap loe;
4ee9c684 979
688425e8 980 gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
08b7917c 981 bitmap_set_bit (visited, bb->index);
688425e8 982
30928c44 983 loe = live_on_entry (live, bb);
4ee9c684 984
30928c44 985 FOR_EACH_EDGE (e, ei, bb->preds)
4ee9c684 986 {
30928c44 987 pred_bb = e->src;
74bfe107 988 if (!region_contains_p (live->map, pred_bb))
30928c44 989 continue;
78775228 990 /* Variables live-on-entry from BB that aren't defined in the
48e1416a 991 predecessor block. This should be the live on entry vars to pred.
30928c44 992 Note that liveout is the DEFs in a block while live on entry is
78775228 993 being calculated.
994 Add these bits to live-on-entry for the pred.  If there are any
30928c44 995 changes, and pred_bb has already been visited, add it to the
996 revisit stack. */
78775228 997 change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
998 loe, &live->liveout[pred_bb->index]);
999 if (change
1000 && bitmap_bit_p (visited, pred_bb->index))
30928c44 1001 {
08b7917c 1002 bitmap_clear_bit (visited, pred_bb->index);
30928c44 1003 *(live->stack_top)++ = pred_bb->index;
1004 }
4ee9c684 1005 }
1006}
1007
1008
48e1416a 1009/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
7920eed5 1010 of all the variables. */
4ee9c684 1011
30928c44 1012static void
1013live_worklist (tree_live_info_p live)
4ee9c684 1014{
30928c44 1015 unsigned b;
4ee9c684 1016 basic_block bb;
3c6549f8 1017 auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
4ee9c684 1018
53c5d9d4 1019 bitmap_clear (visited);
4ee9c684 1020
74bfe107 1021 /* Visit the region's blocks in reverse order and propagate live on entry
30928c44 1022 values into the predecessor blocks. */
74bfe107 1023 for (unsigned i = live->map->vec_bbs.length () - 1;
1024 live->map->vec_bbs.iterate (i, &bb); --i)
78775228 1025 loe_visit_block (live, bb, visited);
4ee9c684 1026
30928c44 1027 /* Process any blocks which require further iteration. */
1028 while (live->stack_top != live->work_stack)
4ee9c684 1029 {
30928c44 1030 b = *--(live->stack_top);
78775228 1031 loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
30928c44 1032 }
30928c44 1033}
4ee9c684 1034
4ee9c684 1035
7920eed5 1036/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
30928c44 1037 links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
1038 in the liveout vector. */
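
/* For example, if an SSA_NAME is defined in block D and one of its uses is
   a PHI argument on edge S->T, the use counts as live at the exit of S, so
   the partition's bit is set in livein (S) (unless S == D) and the name is
   marked in the global bitmap for live_worklist to propagate it further.
   This merely restates the code below; it adds no extra rule.  */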
4ee9c684 1039
30928c44 1040static void
1041set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
1042{
1043 int p;
42acab1c 1044 gimple *stmt;
30928c44 1045 use_operand_p use;
1046 basic_block def_bb = NULL;
1047 imm_use_iterator imm_iter;
1048 bool global = false;
4ee9c684 1049
30928c44 1050 p = var_to_partition (live->map, ssa_name);
1051 if (p == NO_PARTITION)
1052 return;
4ee9c684 1053
30928c44 1054 stmt = SSA_NAME_DEF_STMT (ssa_name);
1055 if (stmt)
4ee9c684 1056 {
75a70cf9 1057 def_bb = gimple_bb (stmt);
2d043327 1058 /* Mark defs in liveout bitmap temporarily. */
74bfe107 1059 if (def_bb && region_contains_p (live->map, def_bb))
4fb07d00 1060 bitmap_set_bit (&live->liveout[def_bb->index], p);
0cc4271a 1061 }
30928c44 1062 else
34154e27 1063 def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
4ee9c684 1064
4d8d655b 1065 /* An undefined local variable does not need to be very alive. */
1066 if (ssa_undefined_value_p (ssa_name, false))
1067 return;
1068
30928c44 1069 /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
1070 add it to the list of live on entry blocks. */
1071 FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
4ee9c684 1072 {
42acab1c 1073 gimple *use_stmt = USE_STMT (use);
30928c44 1074 basic_block add_block = NULL;
4ee9c684 1075
75a70cf9 1076 if (gimple_code (use_stmt) == GIMPLE_PHI)
30928c44 1077 {
1078 /* Uses in PHIs are considered to be live at exit of the SRC block
1079 as this is where a copy would be inserted.  Check to see if it is
1080 defined in that block, or whether it's live on entry. */
1081 int index = PHI_ARG_INDEX_FROM_USE (use);
1a91d914 1082 edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
74bfe107 1083 if (e->src != def_bb && region_contains_p (live->map, e->src))
1084 add_block = e->src;
30928c44 1085 }
9845d120 1086 else if (is_gimple_debug (use_stmt))
1087 continue;
30928c44 1088 else
1089 {
1090 /* If it's not defined in this block, it's live on entry. */
75a70cf9 1091 basic_block use_bb = gimple_bb (use_stmt);
74bfe107 1092 if (use_bb != def_bb && region_contains_p (live->map, use_bb))
30928c44 1093 add_block = use_bb;
48e1416a 1094 }
30928c44 1095
1096 /* If there was a live on entry use, set the bit. */
1097 if (add_block)
1098 {
1099 global = true;
4fb07d00 1100 bitmap_set_bit (&live->livein[add_block->index], p);
4ee9c684 1101 }
1102 }
4ee9c684 1103
30928c44 1104 /* If SSA_NAME is live on entry to at least one block, mark its partition as
1105 global so live_worklist will fill in the blocks between the def and uses. */
1106 if (global)
1107 bitmap_set_bit (live->global, p);
4ee9c684 1108}
1109
1110
1111/* Calculate the live on exit vectors based on the entry info in LIVEINFO. */
1112
78775228 1113static void
4ee9c684 1114calculate_live_on_exit (tree_live_info_p liveinfo)
1115{
4ee9c684 1116 basic_block bb;
1117 edge e;
30928c44 1118 edge_iterator ei;
4ee9c684 1119
2d043327 1120 /* Live on entry calculations used the liveout vectors for defs, clear them. */
74bfe107 1121 for (unsigned i = 0; liveinfo->map->vec_bbs.iterate (i, &bb); ++i)
4fb07d00 1122 bitmap_clear (&liveinfo->liveout[bb->index]);
4ee9c684 1123
1124 /* Set all the live-on-exit bits for uses in PHIs. */
fc00614f 1125 FOR_EACH_BB_FN (bb, cfun)
4ee9c684 1126 {
1a91d914 1127 gphi_iterator gsi;
75a70cf9 1128 size_t i;
1129
30928c44 1130 /* Mark the PHI arguments which are live on exit to the pred block. */
75a70cf9 1131 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1132 {
1a91d914 1133 gphi *phi = gsi.phi ();
74bfe107 1134 if (virtual_operand_p (gimple_phi_result (phi)))
1135 continue;
75a70cf9 1136 for (i = 0; i < gimple_phi_num_args (phi); i++)
48e1416a 1137 {
75a70cf9 1138 tree t = PHI_ARG_DEF (phi, i);
1139 int p;
1140
1141 if (TREE_CODE (t) != SSA_NAME)
1142 continue;
1143
1144 p = var_to_partition (liveinfo->map, t);
1145 if (p == NO_PARTITION)
1146 continue;
1147 e = gimple_phi_arg_edge (phi, i);
74bfe107 1148 if (region_contains_p (liveinfo->map, e->src))
4fb07d00 1149 bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
75a70cf9 1150 }
1151 }
30928c44 1152
74bfe107 1153 if (!region_contains_p (liveinfo->map, bb))
1154 continue;
1155
2d043327 1156 /* Add each successor's live on entry to this block's live on exit. */
30928c44 1157 FOR_EACH_EDGE (e, ei, bb->succs)
74bfe107 1158 if (region_contains_p (liveinfo->map, e->dest))
4fb07d00 1159 bitmap_ior_into (&liveinfo->liveout[bb->index],
30928c44 1160 live_on_entry (liveinfo, e->dest));
4ee9c684 1161 }
30928c44 1162}
1163
2d043327 1164
48e1416a 1165/* Given partition map MAP, calculate all the live on entry bitmaps for
30928c44 1166 each partition. Return a new live info object. */
1167
48e1416a 1168tree_live_info_p
78775228 1169calculate_live_ranges (var_map map, bool want_livein)
30928c44 1170{
1171 tree var;
1172 unsigned i;
1173 tree_live_info_p live;
4ee9c684 1174
30928c44 1175 live = new_tree_live_info (map);
4ee9c684 1176 for (i = 0; i < num_var_partitions (map); i++)
1177 {
30928c44 1178 var = partition_to_var (map, i);
1179 if (var != NULL_TREE)
1180 set_var_live_on_entry (var, live);
4ee9c684 1181 }
1182
30928c44 1183 live_worklist (live);
1184
382ecba7 1185 if (flag_checking)
1186 verify_live_on_entry (live);
30928c44 1187
1188 calculate_live_on_exit (live);
78775228 1189
1190 if (!want_livein)
1191 {
1192 bitmap_obstack_release (&live->livein_obstack);
1193 free (live->livein);
1194 live->livein = NULL;
1195 }
1196
30928c44 1197 return live;
4ee9c684 1198}
91b30299 1199\f
1200/* Data structure for compute_live_vars* functions. */
1201
1202struct compute_live_vars_data {
1203 /* Vector of bitmaps for live vars indices at the end of basic blocks,
1204 indexed by bb->index. ACTIVE[ENTRY_BLOCK] must be empty bitmap,
1205 ACTIVE[EXIT_BLOCK] is used for STOP_AFTER. */
1206 vec<bitmap_head> active;
1207 /* Work bitmap of currently live variables. */
1208 bitmap work;
1209 /* Set of interesting variables. Variables with uids not in this
1210 hash_map are not tracked. */
1211 live_vars_map *vars;
1212};
1213
1214/* Callback for walk_stmt_load_store_addr_ops. If OP is a VAR_DECL with
1215 uid set in DATA->vars, enter its corresponding index into bitmap
1216 DATA->work. */
4ee9c684 1217
91b30299 1218static bool
1219compute_live_vars_visit (gimple *, tree op, tree, void *pdata)
1220{
1221 compute_live_vars_data *data = (compute_live_vars_data *) pdata;
1222 op = get_base_address (op);
1223 if (op && VAR_P (op))
1224 if (unsigned int *v = data->vars->get (DECL_UID (op)))
1225 bitmap_set_bit (data->work, *v);
1226 return false;
1227}
1228
1229/* Helper routine for compute_live_vars, calculating the sets of live
1230 variables at the end of BB, leaving the result in DATA->work.
1231 If STOP_AFTER is non-NULL, stop processing after that stmt. */
1232
1233static void
1234compute_live_vars_1 (basic_block bb, compute_live_vars_data *data,
1235 gimple *stop_after)
1236{
1237 edge e;
1238 edge_iterator ei;
1239 gimple_stmt_iterator gsi;
1240 walk_stmt_load_store_addr_fn visit = compute_live_vars_visit;
1241
1242 bitmap_clear (data->work);
1243 FOR_EACH_EDGE (e, ei, bb->preds)
1244 bitmap_ior_into (data->work, &data->active[e->src->index]);
1245
1246 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1247 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), data, NULL, NULL, visit);
1248 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1249 {
1250 gimple *stmt = gsi_stmt (gsi);
1251
1252 if (gimple_clobber_p (stmt))
1253 {
1254 tree lhs = gimple_assign_lhs (stmt);
1255 if (VAR_P (lhs))
1256 if (unsigned int *v = data->vars->get (DECL_UID (lhs)))
1257 bitmap_clear_bit (data->work, *v);
1258 }
1259 else if (!is_gimple_debug (stmt))
1260 walk_stmt_load_store_addr_ops (stmt, data, visit, visit, visit);
1261 if (stmt == stop_after)
1262 break;
1263 }
1264}
1265
1266/* For function FN and live_vars_map (hash map from DECL_UIDs to a dense set of
1267 indexes of automatic variables VARS, compute which of those variables are
1268 (might be) live at the end of each basic block. */
1269
1270vec<bitmap_head>
1271compute_live_vars (struct function *fn, live_vars_map *vars)
1272{
1273 vec<bitmap_head> active;
4ee9c684 1274
91b30299 1275 /* We approximate the live range of a stack variable by taking the first
1276 mention of its name as starting point(s), and by the end-of-scope
1277 death clobber added by gimplify as ending point(s) of the range.
1278 This overapproximates in the case where we, for instance, moved an
1279 address-taken operation upward without also moving a dereference to it
1280 upwards.  But it's conservatively correct, as a variable can never hold
1281 values before its name is mentioned at least once.
1282
1283 We then do a mostly classical bitmap liveness algorithm. */
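
  /* In dataflow terms, the fixed-point loop below computes, in reverse
     post-order,

       work = union of ACTIVE (P) over all predecessors P of BB

     then walks BB's statements in order, adding every tracked variable that
     is mentioned and clearing every one that is clobbered, and finally IORs
     WORK into ACTIVE (BB); it iterates until no ACTIVE set changes.  */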
1284
1285 active.create (last_basic_block_for_fn (fn));
1286 active.quick_grow (last_basic_block_for_fn (fn));
1287 for (int i = 0; i < last_basic_block_for_fn (fn); i++)
1288 bitmap_initialize (&active[i], &bitmap_default_obstack);
1289
1290 bitmap work = BITMAP_ALLOC (NULL);
1291
1292 int *rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
1293 int n_bbs = pre_and_rev_post_order_compute_fn (fn, NULL, rpo, false);
1294
1295 bool changed = true;
1296 compute_live_vars_data data = { active, work, vars };
1297 while (changed)
1298 {
1299 int i;
1300 changed = false;
1301 for (i = 0; i < n_bbs; i++)
1302 {
1303 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
1304 compute_live_vars_1 (bb, &data, NULL);
1305 if (bitmap_ior_into (&active[bb->index], work))
1306 changed = true;
1307 }
1308 }
1309
1310 free (rpo);
1311 BITMAP_FREE (work);
1312
1313 return active;
1314}
1315
1316/* For ACTIVE computed by compute_live_vars, compute a bitmap of variables
1317 live after the STOP_AFTER statement and return that bitmap. */
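
/* A hypothetical usage sketch of this interface (names made up):

     live_vars_map vars;
     ... map each interesting DECL_UID to a dense index in VARS ...
     vec<bitmap_head> active = compute_live_vars (cfun, &vars);
     bitmap live = live_vars_at_stmt (active, &vars, stmt);
     ... query LIVE, then BITMAP_FREE (live) ...
     destroy_live_vars (active);
*/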
1318
1319bitmap
1320live_vars_at_stmt (vec<bitmap_head> &active, live_vars_map *vars,
1321 gimple *stop_after)
1322{
1323 bitmap work = BITMAP_ALLOC (NULL);
1324 compute_live_vars_data data = { active, work, vars };
1325 basic_block bb = gimple_bb (stop_after);
1326 compute_live_vars_1 (bb, &data, stop_after);
1327 return work;
1328}
1329
1330/* Destroy what compute_live_vars has returned when it is no longer needed. */
1331
1332void
1333destroy_live_vars (vec<bitmap_head> &active)
1334{
1335 unsigned len = active.length ();
1336 for (unsigned i = 0; i < len; i++)
1337 bitmap_clear (&active[i]);
1338
1339 active.release ();
1340}
1341\f
4ee9c684 1342/* Output partition map MAP to file F. */
1343
1344void
1345dump_var_map (FILE *f, var_map map)
1346{
1347 int t;
1348 unsigned x, y;
1349 int p;
1350
1351 fprintf (f, "\nPartition map \n\n");
1352
1353 for (x = 0; x < map->num_partitions; x++)
1354 {
2d043327 1355 if (map->view_to_partition != NULL)
1356 p = map->view_to_partition[x];
4ee9c684 1357 else
1358 p = x;
1359
ade7d11b 1360 if (ssa_name (p) == NULL_TREE
1361 || virtual_operand_p (ssa_name (p)))
4ee9c684 1362 continue;
1363
1364 t = 0;
c211d998 1365 for (y = 1; y < num_ssa_names; y++)
4ee9c684 1366 {
1367 p = partition_find (map->var_partition, y);
2d043327 1368 if (map->partition_to_view)
1369 p = map->partition_to_view[p];
4ee9c684 1370 if (p == (int)x)
1371 {
1372 if (t++ == 0)
1373 {
9af5ce0c 1374 fprintf (f, "Partition %d (", x);
4ee9c684 1375 print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
1376 fprintf (f, " - ");
1377 }
1378 fprintf (f, "%d ", y);
1379 }
1380 }
1381 if (t != 0)
1382 fprintf (f, ")\n");
1383 }
1384 fprintf (f, "\n");
1385}
1386
1387
c7d89805 1388/* Generic dump for the above. */
1389
1390DEBUG_FUNCTION void
1391debug (_var_map &ref)
1392{
1393 dump_var_map (stderr, &ref);
1394}
1395
1396DEBUG_FUNCTION void
1397debug (_var_map *ptr)
1398{
1399 if (ptr)
1400 debug (*ptr);
1401 else
1402 fprintf (stderr, "<nil>\n");
1403}
1404
1405
4ee9c684 1406/* Output live range info LIVE to file F, controlled by FLAG. */
1407
1408void
1409dump_live_info (FILE *f, tree_live_info_p live, int flag)
1410{
1411 basic_block bb;
4f917ffe 1412 unsigned i;
4ee9c684 1413 var_map map = live->map;
0cc4271a 1414 bitmap_iterator bi;
4ee9c684 1415
1416 if ((flag & LIVEDUMP_ENTRY) && live->livein)
1417 {
fc00614f 1418 FOR_EACH_BB_FN (bb, cfun)
4ee9c684 1419 {
1420 fprintf (f, "\nLive on entry to BB%d : ", bb->index);
4fb07d00 1421 EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
4ee9c684 1422 {
30928c44 1423 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1424 fprintf (f, " ");
4ee9c684 1425 }
1426 fprintf (f, "\n");
1427 }
1428 }
1429
1430 if ((flag & LIVEDUMP_EXIT) && live->liveout)
1431 {
fc00614f 1432 FOR_EACH_BB_FN (bb, cfun)
4ee9c684 1433 {
1434 fprintf (f, "\nLive on exit from BB%d : ", bb->index);
4fb07d00 1435 EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
4ee9c684 1436 {
1437 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1438 fprintf (f, " ");
0cc4271a 1439 }
4ee9c684 1440 fprintf (f, "\n");
1441 }
1442 }
1443}
8c0963c4 1444
c7d89805 1445
1446/* Generic dump for the above. */
1447
1448DEBUG_FUNCTION void
1449debug (tree_live_info_d &ref)
1450{
1451 dump_live_info (stderr, &ref, 0);
1452}
1453
1454DEBUG_FUNCTION void
1455debug (tree_live_info_d *ptr)
1456{
1457 if (ptr)
1458 debug (*ptr);
1459 else
1460 fprintf (stderr, "<nil>\n");
1461}
1462
1463
30928c44 1464/* Verify that the info in LIVE matches the current cfg. */
2d043327 1465
30928c44 1466static void
1467verify_live_on_entry (tree_live_info_p live)
1468{
1469 unsigned i;
1470 tree var;
42acab1c 1471 gimple *stmt;
30928c44 1472 basic_block bb;
1473 edge e;
1474 int num;
1475 edge_iterator ei;
1476 var_map map = live->map;
1477
1478 /* Check for live on entry partitions and report those with a DEF in
1479 the program. This will typically mean an optimization has done
1480 something wrong. */
34154e27 1481 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
30928c44 1482 num = 0;
1483 FOR_EACH_EDGE (e, ei, bb->succs)
1484 {
1485 int entry_block = e->dest->index;
74bfe107 1486 if (!region_contains_p (live->map, e->dest))
30928c44 1487 continue;
1488 for (i = 0; i < (unsigned)num_var_partitions (map); i++)
1489 {
1490 basic_block tmp;
ec11736b 1491 tree d = NULL_TREE;
30928c44 1492 bitmap loe;
1493 var = partition_to_var (map, i);
1494 stmt = SSA_NAME_DEF_STMT (var);
75a70cf9 1495 tmp = gimple_bb (stmt);
ec11736b 1496 if (SSA_NAME_VAR (var))
1497 d = ssa_default_def (cfun, SSA_NAME_VAR (var));
30928c44 1498
1499 loe = live_on_entry (live, e->dest);
1500 if (loe && bitmap_bit_p (loe, i))
1501 {
75a70cf9 1502 if (!gimple_nop_p (stmt))
30928c44 1503 {
1504 num++;
1505 print_generic_expr (stderr, var, TDF_SLIM);
1506 fprintf (stderr, " is defined ");
1507 if (tmp)
1508 fprintf (stderr, " in BB%d, ", tmp->index);
1509 fprintf (stderr, "by:\n");
75a70cf9 1510 print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
48e1416a 1511 fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
30928c44 1512 entry_block);
1513 fprintf (stderr, " So it appears to have multiple defs.\n");
1514 }
1515 else
1516 {
1517 if (d != var)
1518 {
1519 num++;
1520 print_generic_expr (stderr, var, TDF_SLIM);
75a70cf9 1521 fprintf (stderr, " is live-on-entry to BB%d ",
1522 entry_block);
30928c44 1523 if (d)
1524 {
1525 fprintf (stderr, " but is not the default def of ");
1526 print_generic_expr (stderr, d, TDF_SLIM);
1527 fprintf (stderr, "\n");
1528 }
1529 else
1530 fprintf (stderr, " and there is no default def.\n");
1531 }
1532 }
1533 }
1534 else
1535 if (d == var)
1536 {
4d8d655b 1537 /* An undefined local variable does not need to be very
1538 alive. */
1539 if (ssa_undefined_value_p (var, false))
1540 continue;
1541
48e1416a 1542 /* The only way this var shouldn't be marked live on entry is
30928c44 1543 if it occurs in a PHI argument of the block. */
75a70cf9 1544 size_t z;
1545 bool ok = false;
1a91d914 1546 gphi_iterator gsi;
75a70cf9 1547 for (gsi = gsi_start_phis (e->dest);
1548 !gsi_end_p (gsi) && !ok;
1549 gsi_next (&gsi))
30928c44 1550 {
1a91d914 1551 gphi *phi = gsi.phi ();
74bfe107 1552 if (virtual_operand_p (gimple_phi_result (phi)))
1553 continue;
75a70cf9 1554 for (z = 0; z < gimple_phi_num_args (phi); z++)
1555 if (var == gimple_phi_arg_def (phi, z))
30928c44 1556 {
75a70cf9 1557 ok = true;
30928c44 1558 break;
1559 }
1560 }
1561 if (ok)
1562 continue;
b2df3bbf 1563 /* Expand adds unused default defs for PARM_DECLs and
1564 RESULT_DECLs. They're ok. */
1565 if (has_zero_uses (var)
1566 && SSA_NAME_VAR (var)
1567 && !VAR_P (SSA_NAME_VAR (var)))
1568 continue;
30928c44 1569 num++;
1570 print_generic_expr (stderr, var, TDF_SLIM);
48e1416a 1571 fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
30928c44 1572 entry_block);
1573 fprintf (stderr, "but it is a default def so it should be.\n");
1574 }
1575 }
1576 }
1577 gcc_assert (num <= 0);
1578}