gcc/tree-ssa-live.c
4ee9c684 1/* Liveness for SSA trees.
d353bf18 2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
4ee9c684 3 Contributed by Andrew MacLeod <amacleod@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
4ee9c684 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
4ee9c684 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
9ef16211 24#include "backend.h"
4ee9c684 25#include "tree.h"
9ef16211 26#include "gimple.h"
27#include "rtl.h"
28#include "ssa.h"
29#include "alias.h"
b20a8bb4 30#include "fold-const.h"
ce084dfc 31#include "gimple-pretty-print.h"
bc61cadb 32#include "internal-fn.h"
dcf1a1ec 33#include "gimple-iterator.h"
d53441c8 34#include "flags.h"
d53441c8 35#include "insn-config.h"
36#include "expmed.h"
37#include "dojump.h"
38#include "explow.h"
39#include "calls.h"
40#include "emit-rtl.h"
41#include "varasm.h"
42#include "stmt.h"
9ed99284 43#include "expr.h"
073c1fd5 44#include "tree-dfa.h"
b9ed1410 45#include "timevar.h"
46#include "dumpfile.h"
4ee9c684 47#include "tree-ssa-live.h"
0b205f4c 48#include "diagnostic-core.h"
2ff2a12b 49#include "debug.h"
4d8d655b 50#include "tree-ssa.h"
a467a47b 51#include "cgraph.h"
52#include "ipa-utils.h"
6c01cf92 53#include "cfgloop.h"
2d043327 54
382ecba7 55static void verify_live_on_entry (tree_live_info_p);
4ee9c684 56
4ee9c684 57
2d043327 58/* VARMAP maintains a mapping from SSA version number to real variables.
59
60 All SSA_NAMES are divided into partitions. Initially each ssa_name is the
61 only member of its own partition. Coalescing will attempt to group any
62 ssa_names which occur in a copy or in a PHI node into the same partition.
63
64 At the end of out-of-ssa, each partition becomes a "real" variable and is
65 rewritten as a compiler variable.
66
f0b5f617 67 The var_map data structure is used to manage these partitions. It allows
2d043327 68 partitions to be combined, and determines which partition belongs to what
69 ssa_name or variable, and vice versa. */
70
71
2d043327 72/* Remove the base table in MAP. */
4ee9c684 73
2d043327 74static void
75var_map_base_fini (var_map map)
76{
77 /* Free the basevar info if it is present. */
78 if (map->partition_to_base_index != NULL)
79 {
2d043327 80 free (map->partition_to_base_index);
81 map->partition_to_base_index = NULL;
82 map->num_basevars = 0;
83 }
84}
4ee9c684 85/* Create a variable partition map of SIZE, initialize and return it. */
86
87var_map
88init_var_map (int size)
89{
90 var_map map;
91
92 map = (var_map) xmalloc (sizeof (struct _var_map));
93 map->var_partition = partition_new (size);
4ee9c684 94
2d043327 95 map->partition_to_view = NULL;
96 map->view_to_partition = NULL;
4ee9c684 97 map->num_partitions = size;
98 map->partition_size = size;
2d043327 99 map->num_basevars = 0;
100 map->partition_to_base_index = NULL;
4ee9c684 101 return map;
102}
103
104
105/* Free memory associated with MAP. */
106
107void
108delete_var_map (var_map map)
109{
2d043327 110 var_map_base_fini (map);
4ee9c684 111 partition_delete (map->var_partition);
dd045aee 112 free (map->partition_to_view);
113 free (map->view_to_partition);
4ee9c684 114 free (map);
115}
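/* Added editorial sketch (not part of the original sources): the typical
   lifecycle of a var_map, as the out-of-SSA machinery that consumes this
   file is believed to drive it.  NAME1 and NAME2 stand for hypothetical
   SSA_NAMEs to be coalesced.

       var_map map = init_var_map (num_ssa_names);
       ...
       var_union (map, name1, name2);     -- coalesce two related names
       partition_view_normal (map);       -- compact to the used partitions
       ...
       delete_var_map (map);
*/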
116
117
48e1416a 118/* This function will combine the partitions in MAP for VAR1 and VAR2. It
119 returns the partition which represents the new partition. If the two
0bed3869 120 partitions cannot be combined, NO_PARTITION is returned. */
4ee9c684 121
122int
123var_union (var_map map, tree var1, tree var2)
124{
125 int p1, p2, p3;
a8dd994c 126
127 gcc_assert (TREE_CODE (var1) == SSA_NAME);
128 gcc_assert (TREE_CODE (var2) == SSA_NAME);
4ee9c684 129
48e1416a 130 /* This is independent of partition_to_view. If partition_to_view is
4ee9c684 131 on, then whichever one of these partitions is absorbed will never have a
2d043327 132 dereference into the partition_to_view array any more. */
4ee9c684 133
a8dd994c 134 p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
135 p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));
4ee9c684 136
8c0963c4 137 gcc_assert (p1 != NO_PARTITION);
138 gcc_assert (p2 != NO_PARTITION);
4ee9c684 139
140 if (p1 == p2)
141 p3 = p1;
142 else
143 p3 = partition_union (map->var_partition, p1, p2);
144
2d043327 145 if (map->partition_to_view)
146 p3 = map->partition_to_view[p3];
4ee9c684 147
4ee9c684 148 return p3;
149}
150
48e1416a 151
152/* Compress the partition numbers in MAP such that they fall in the range
4ee9c684 153 0..(num_partitions-1) instead of wherever they turned out during
154 the partitioning exercise. This removes any references to unused
155 partitions, thereby allowing bitmaps and other vectors to be much
48e1416a 156 denser.
4ee9c684 157
158 This is implemented such that compaction doesn't affect partitioning.
159 I.e., once partitions are created and possibly merged, running one
160 or more different kinds of compaction will not affect the partitions
161 themselves. Their index might change, but all the same variables will
162 still be members of the same partition group. This allows work on reduced
163 sets, and no loss of information when a larger set is later desired.
164
165 In particular, coalescing can work on partitions which have 2 or more
166 definitions, and then 'recompact' later to include all the single
167 definitions for assignment to program variables. */
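/* Added illustration (not in the original sources) of the view arrays
   built by compaction below.  Suppose only partitions 0, 3 and 7 survive
   in a map of size 8; partition_view_fini then produces

       partition_to_view = { 0, -1, -1, 1, -1, -1, -1, 2 }
       view_to_partition = { 0, 3, 7 }

   so view index 1 denotes original partition 3, and entries for unused
   partitions stay at -1 (the array is memset to 0xff).  */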
168
2d043327 169
48e1416a 170/* Set MAP back to the initial state of having no partition view. Return a
171 bitmap which has a bit set for each partition number which is in use in the
2d043327 172 varmap. */
173
174static bitmap
175partition_view_init (var_map map)
4ee9c684 176{
2d043327 177 bitmap used;
178 int tmp;
179 unsigned int x;
4ee9c684 180
2d043327 181 used = BITMAP_ALLOC (NULL);
4ee9c684 182
2d043327 183 /* Already in a view? Abandon the old one. */
184 if (map->partition_to_view)
4ee9c684 185 {
2d043327 186 free (map->partition_to_view);
187 map->partition_to_view = NULL;
4ee9c684 188 }
2d043327 189 if (map->view_to_partition)
4ee9c684 190 {
2d043327 191 free (map->view_to_partition);
192 map->view_to_partition = NULL;
4ee9c684 193 }
194
4ee9c684 195 /* Find out which partitions are actually referenced. */
2d043327 196 for (x = 0; x < map->partition_size; x++)
4ee9c684 197 {
198 tmp = partition_find (map->var_partition, x);
7c782c9b 199 if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
a8dd994c 200 && (!has_zero_uses (ssa_name (tmp))
b2df3bbf 201 || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))
202 || (SSA_NAME_VAR (ssa_name (tmp))
203 && !VAR_P (SSA_NAME_VAR (ssa_name (tmp))))))
2d043327 204 bitmap_set_bit (used, tmp);
4ee9c684 205 }
206
2d043327 207 map->num_partitions = map->partition_size;
208 return used;
209}
210
211
212/* This routine will finalize the view data for MAP based on the partitions
48e1416a 213 set in SELECTED. This is either the same bitmap returned from
2d043327 214 partition_view_init, or a trimmed down version if some of those partitions
215 were not desired in this view. SELECTED is freed before returning. */
216
48e1416a 217static void
2d043327 218partition_view_fini (var_map map, bitmap selected)
219{
220 bitmap_iterator bi;
221 unsigned count, i, x, limit;
2d043327 222
223 gcc_assert (selected);
224
225 count = bitmap_count_bits (selected);
226 limit = map->partition_size;
227
228 /* If it's a one-to-one ratio, we don't need any view compaction. */
229 if (count < limit)
4ee9c684 230 {
2d043327 231 map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
232 memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
233 map->view_to_partition = (int *)xmalloc (count * sizeof (int));
3e790786 234
2d043327 235 i = 0;
236 /* Give each selected partition an index. */
237 EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
4ee9c684 238 {
2d043327 239 map->partition_to_view[x] = i;
240 map->view_to_partition[i] = x;
2d043327 241 i++;
3e790786 242 }
2d043327 243 gcc_assert (i == count);
244 map->num_partitions = i;
4ee9c684 245 }
2d043327 246
247 BITMAP_FREE (selected);
248}
249
250
94f92c36 251/* Create a partition view which includes all the used partitions in MAP. */
2d043327 252
4fb07d00 253void
94f92c36 254partition_view_normal (var_map map)
2d043327 255{
256 bitmap used;
257
258 used = partition_view_init (map);
259 partition_view_fini (map, used);
260
94f92c36 261 var_map_base_fini (map);
2d043327 262}
263
264
48e1416a 265/* Create a partition view in MAP which includes just the partitions which
266 occur in the bitmap ONLY. */
268
4fb07d00 269void
94f92c36 270partition_view_bitmap (var_map map, bitmap only)
2d043327 271{
272 bitmap used;
273 bitmap new_partitions = BITMAP_ALLOC (NULL);
274 unsigned x, p;
275 bitmap_iterator bi;
276
277 used = partition_view_init (map);
278 EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
4ee9c684 279 {
2d043327 280 p = partition_find (map->var_partition, x);
281 gcc_assert (bitmap_bit_p (used, p));
282 bitmap_set_bit (new_partitions, p);
4ee9c684 283 }
2d043327 284 partition_view_fini (map, new_partitions);
4ee9c684 285
94f92c36 286 var_map_base_fini (map);
4ee9c684 287}
288
289
4ae5778c 290static bitmap usedvars;
291
920bd157 292/* Mark VAR as used, so that it'll be preserved during rtl expansion.
293 Returns true if VAR wasn't marked before. */
4ae5778c 294
920bd157 295static inline bool
4ae5778c 296set_is_used (tree var)
297{
920bd157 298 return bitmap_set_bit (usedvars, DECL_UID (var));
4ae5778c 299}
300
301/* Return true if VAR is marked as used. */
302
303static inline bool
304is_used_p (tree var)
305{
306 return bitmap_bit_p (usedvars, DECL_UID (var));
307}
308
920bd157 309static inline void mark_all_vars_used (tree *);
4ee9c684 310
280450fa 311/* Helper function for mark_all_vars_used, called via walk_tree. */
312
313static tree
920bd157 314mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
280450fa 315{
316 tree t = *tp;
2ff2a12b 317 enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
318 tree b;
280450fa 319
db22d3cc 320 if (TREE_CODE (t) == SSA_NAME)
ec11736b 321 {
322 *walk_subtrees = 0;
323 t = SSA_NAME_VAR (t);
324 if (!t)
325 return NULL;
326 }
75a70cf9 327
328 if (IS_EXPR_CODE_CLASS (c)
2ff2a12b 329 && (b = TREE_BLOCK (t)) != NULL)
330 TREE_USED (b) = true;
db22d3cc 331
28daba6f 332 /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
333 fields do not contain vars. */
b06d1934 334 if (TREE_CODE (t) == TARGET_MEM_REF)
335 {
920bd157 336 mark_all_vars_used (&TMR_BASE (t));
337 mark_all_vars_used (&TMR_INDEX (t));
338 mark_all_vars_used (&TMR_INDEX2 (t));
b06d1934 339 *walk_subtrees = 0;
340 return NULL;
341 }
342
280450fa 343 /* Only need to mark VAR_DECLS; parameters and return results are not
344 eliminated as unused. */
345 if (TREE_CODE (t) == VAR_DECL)
a93b21ea 346 {
920bd157 347 /* When a global var becomes used for the first time also walk its
348 initializer (non global ones don't have any). */
c8a9710d 349 if (set_is_used (t) && is_global_var (t)
350 && DECL_CONTEXT (t) == current_function_decl)
920bd157 351 mark_all_vars_used (&DECL_INITIAL (t));
a93b21ea 352 }
6ceec668 353 /* remove_unused_scope_block_p requires information about labels
354 which are not DECL_IGNORED_P to tell if they might be used in the IL. */
ad75582e 355 else if (TREE_CODE (t) == LABEL_DECL)
6ceec668 356 /* Although the TREE_USED values that the frontend uses would be
357 acceptable (albeit slightly over-conservative) for our purposes,
358 init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
359 must re-compute it here. */
360 TREE_USED (t) = 1;
280450fa 361
ce45a448 362 if (IS_TYPE_OR_DECL_P (t))
280450fa 363 *walk_subtrees = 0;
364
365 return NULL;
366}
367
2ff2a12b 368/* Mark the scope block SCOPE and its subblocks as unused when they can
369 possibly be eliminated if dead. */
370
371static void
372mark_scope_block_unused (tree scope)
373{
374 tree t;
375 TREE_USED (scope) = false;
376 if (!(*debug_hooks->ignore_block) (scope))
377 TREE_USED (scope) = true;
378 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
379 mark_scope_block_unused (t);
380}
381
382/* Check if the block is dead (by possibly eliminating its dead subblocks)
48e1416a 383 and return true if so.
2ff2a12b 384 A block is declared dead if:
385 1) No statements are associated with it.
386 2) It declares no live variables.
387 3) All subblocks are dead,
388 or there is precisely one subblock and the block
389 has the same abstract origin as the outer block and declares
390 no variables, so it is a pure wrapper.
4a7e4fcc 391 When we are not outputting full debug info, we also eliminate dead variables
2ff2a12b 392 out of scope blocks to let them be recycled by GGC and to save copying work
393 done by the inliner. */
394
395static bool
a467a47b 396remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
2ff2a12b 397{
8c796c7c 398 tree *t, *next;
2ff2a12b 399 bool unused = !TREE_USED (scope);
2ff2a12b 400 int nsubblocks = 0;
401
a467a47b 402 /* For ipa-polymorphic-call.c purposes, preserve blocks:
403 1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones */
404 if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
405 {
406 in_ctor_dtor_block = true;
407 unused = false;
408 }
409 /* 2) inside such blocks, the outermost block with BLOCK_ABSTRACT_ORIGIN
410 being a FUNCTION_DECL. */
411 else if (in_ctor_dtor_block
412 && BLOCK_ABSTRACT_ORIGIN (scope)
413 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (scope)) == FUNCTION_DECL)
414 {
415 in_ctor_dtor_block = false;
416 unused = false;
417 }
418
8c796c7c 419 for (t = &BLOCK_VARS (scope); *t; t = next)
420 {
1767a056 421 next = &DECL_CHAIN (*t);
8c796c7c 422
423 /* Debug info of a nested function refers to the block of the
36267649 424 function. We might still call it even if all statements
425 of the function it was nested into were eliminated.
48e1416a 426
36267649 427 TODO: We can actually look into the cgraph to see if the function
428 will be output to a file. */
8c796c7c 429 if (TREE_CODE (*t) == FUNCTION_DECL)
430 unused = false;
ee093c13 431
432 /* If a decl has a value expr, we need to instantiate it
433 regardless of debug info generation, to avoid codegen
434 differences in memory overlap tests. update_equiv_regs() may
435 indirectly call validate_equiv_mem() to test whether a
436 SET_DEST overlaps with others, and if the value expr changes
437 by virtual register instantiation, we may end up with
438 different results. */
439 else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
440 unused = false;
441
25db41e9 442 /* Remove everything we don't generate debug info for. */
443 else if (DECL_IGNORED_P (*t))
64f6c8c9 444 {
1767a056 445 *t = DECL_CHAIN (*t);
64f6c8c9 446 next = t;
447 }
448
8c796c7c 449 /* When we are outputting debug info, we usually want to output
450 info about optimized-out variables in the scope blocks.
451 The exception is scope blocks not containing any instructions
452 at all, so the user can't get into those scopes in the first place. */
920bd157 453 else if (is_used_p (*t))
8c796c7c 454 unused = false;
6ceec668 455 else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
456 /* For labels that are still used in the IL, the decision to
457 preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
458 risk having different ordering in debug vs. non-debug builds
459 during inlining or versioning.
460 A label appearing here (we have already checked DECL_IGNORED_P)
461 should not be used in the IL unless it has been explicitly used
462 before, so we use TREE_USED as an approximation. */
463 /* In principle, we should do the same here as for the debug case
464 below, however, when debugging, there might be additional nested
465 levels that keep an upper level with a label live, so we have to
466 force this block to be considered used, too. */
467 unused = false;
8c796c7c 468
469 /* When we are not doing full debug info, however, we can keep around
470 only the used variables for cfgexpand's memory packing, saving quite
48e1416a 471 a lot of memory.
36267649 472
473 For the sake of -g3, we keep those vars around but don't count this as
474 a use of the block, so an innermost block with no used vars and no
475 instructions can be considered dead. We only want to keep around blocks
48e1416a 476 the user can breakpoint into and ask about the value of optimized-out variables.
36267649 477
29bcbc13 478 Similarly we need to keep around types at least until all
479 variables of all nested blocks are gone. We track no
480 information on whether a given type is used or not, so we have
481 to keep them even when not emitting debug information,
482 otherwise we may end up remapping variables and their (local)
483 types in different orders depending on whether debug
484 information is being generated. */
485
486 else if (TREE_CODE (*t) == TYPE_DECL
487 || debug_info_level == DINFO_LEVEL_NORMAL
45b2f957 488 || debug_info_level == DINFO_LEVEL_VERBOSE)
36267649 489 ;
7f481d3e 490 else
8c796c7c 491 {
1767a056 492 *t = DECL_CHAIN (*t);
8c796c7c 493 next = t;
494 }
495 }
496
2ff2a12b 497 for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
a467a47b 498 if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
2ff2a12b 499 {
500 if (BLOCK_SUBBLOCKS (*t))
501 {
502 tree next = BLOCK_CHAIN (*t);
503 tree supercontext = BLOCK_SUPERCONTEXT (*t);
cee43f7e 504
2ff2a12b 505 *t = BLOCK_SUBBLOCKS (*t);
cee43f7e 506 while (BLOCK_CHAIN (*t))
507 {
508 BLOCK_SUPERCONTEXT (*t) = supercontext;
509 t = &BLOCK_CHAIN (*t);
510 }
2ff2a12b 511 BLOCK_CHAIN (*t) = next;
512 BLOCK_SUPERCONTEXT (*t) = supercontext;
513 t = &BLOCK_CHAIN (*t);
514 nsubblocks ++;
515 }
516 else
36267649 517 *t = BLOCK_CHAIN (*t);
2ff2a12b 518 }
519 else
520 {
521 t = &BLOCK_CHAIN (*t);
522 nsubblocks ++;
523 }
cee43f7e 524
525
526 if (!unused)
527 ;
2ff2a12b 528 /* Outer scope is always used. */
cee43f7e 529 else if (!BLOCK_SUPERCONTEXT (scope)
530 || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
2ff2a12b 531 unused = false;
cee43f7e 532 /* Innermost blocks with no live variables nor statements can always
533 be eliminated. */
534 else if (!nsubblocks)
535 ;
7fa9fa16 536 /* When not generating debug info we can eliminate info on unused
537 variables. */
94bed7c3 538 else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE)
f63d3ecc 539 {
7fa9fa16 540 /* Even for -g0 don't prune outer scopes from artificial
f63d3ecc 541 functions, otherwise diagnostics using tree_nonartificial_location
542 will not be emitted properly. */
543 if (inlined_function_outer_scope_p (scope))
544 {
545 tree ao = scope;
546
547 while (ao
548 && TREE_CODE (ao) == BLOCK
549 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
550 ao = BLOCK_ABSTRACT_ORIGIN (ao);
551 if (ao
552 && TREE_CODE (ao) == FUNCTION_DECL
553 && DECL_DECLARED_INLINE_P (ao)
554 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
555 unused = false;
556 }
557 }
4b5d70fd 558 else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
2ff2a12b 559 unused = false;
cee43f7e 560 /* See if this block is important for the representation of an inlined
561 function. Inlined functions are always represented by a block with
562 block_ultimate_origin set to a FUNCTION_DECL and DECL_SOURCE_LOCATION
563 set... */
564 else if (inlined_function_outer_scope_p (scope))
565 unused = false;
566 else
567 /* Verify that only blocks with source location set
568 are entry points to the inlined functions. */
8e7408e3 569 gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
570 == UNKNOWN_LOCATION);
b26d0c9e 571
572 TREE_USED (scope) = !unused;
2ff2a12b 573 return unused;
574}
2d043327 575
48e1416a 576/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
280450fa 577 eliminated during the tree->rtl conversion process. */
578
579static inline void
920bd157 580mark_all_vars_used (tree *expr_p)
280450fa 581{
920bd157 582 walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
280450fa 583}
584
5169661d 585/* Helper function for clear_unused_block_pointer, called via walk_tree. */
586
587static tree
588clear_unused_block_pointer_1 (tree *tp, int *, void *)
589{
590 if (EXPR_P (*tp) && TREE_BLOCK (*tp)
591 && !TREE_USED (TREE_BLOCK (*tp)))
592 TREE_SET_BLOCK (*tp, NULL);
5169661d 593 return NULL_TREE;
594}
595
9f559b20 596/* Set all block pointers in debug or clobber stmts to NULL if the block
597 is unused, so that they will not be streamed out. */
5169661d 598
599static void
f1ff4562 600clear_unused_block_pointer (void)
5169661d 601{
602 basic_block bb;
603 gimple_stmt_iterator gsi;
1aa7a266 604
fc00614f 605 FOR_EACH_BB_FN (bb, cfun)
5169661d 606 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
607 {
608 unsigned i;
609 tree b;
42acab1c 610 gimple *stmt = gsi_stmt (gsi);
5169661d 611
9f559b20 612 if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
5169661d 613 continue;
614 b = gimple_block (stmt);
615 if (b && !TREE_USED (b))
616 gimple_set_block (stmt, NULL);
617 for (i = 0; i < gimple_num_ops (stmt); i++)
618 walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
619 NULL, NULL);
620 }
621}
b79917fd 622
623/* Dump scope blocks starting at SCOPE to FILE. INDENT is the
624 indentation level and FLAGS is as in print_generic_expr. */
36267649 625
626static void
627dump_scope_block (FILE *file, int indent, tree scope, int flags)
628{
629 tree var, t;
4b5d70fd 630 unsigned int i;
36267649 631
cee43f7e 632 fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
633 TREE_USED (scope) ? "" : " (unused)",
634 BLOCK_ABSTRACT (scope) ? " (abstract)": "");
8e7408e3 635 if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
cee43f7e 636 {
637 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
638 fprintf (file, " %s:%i", s.file, s.line);
639 }
640 if (BLOCK_ABSTRACT_ORIGIN (scope))
36267649 641 {
cee43f7e 642 tree origin = block_ultimate_origin (scope);
643 if (origin)
644 {
645 fprintf (file, " Originating from :");
646 if (DECL_P (origin))
647 print_generic_decl (file, origin, flags);
648 else
649 fprintf (file, "#%i", BLOCK_NUMBER (origin));
650 }
36267649 651 }
cee43f7e 652 fprintf (file, " \n");
1767a056 653 for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
36267649 654 {
f665f7bb 655 fprintf (file, "%*s", indent, "");
36267649 656 print_generic_decl (file, var, flags);
4ae5778c 657 fprintf (file, "\n");
36267649 658 }
4b5d70fd 659 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
660 {
661 fprintf (file, "%*s",indent, "");
662 print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
663 flags);
664 fprintf (file, " (nonlocalized)\n");
665 }
36267649 666 for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
667 dump_scope_block (file, indent + 2, t, flags);
cee43f7e 668 fprintf (file, "\n%*s}\n",indent, "");
36267649 669}
670
7496b609 671/* Dump the tree of lexical scopes starting at SCOPE to stderr. FLAGS
672 is as in print_generic_expr. */
673
4b987fac 674DEBUG_FUNCTION void
7496b609 675debug_scope_block (tree scope, int flags)
676{
677 dump_scope_block (stderr, 0, scope, flags);
678}
679
b79917fd 680
681/* Dump the tree of lexical scopes of current_function_decl to FILE.
682 FLAGS is as in print_generic_expr. */
683
cee43f7e 684void
685dump_scope_blocks (FILE *file, int flags)
686{
687 dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
688}
db22d3cc 689
b79917fd 690
691/* Dump the tree of lexical scopes of current_function_decl to stderr.
692 FLAGS is as in print_generic_expr. */
693
4b987fac 694DEBUG_FUNCTION void
b79917fd 695debug_scope_blocks (int flags)
696{
697 dump_scope_blocks (stderr, flags);
698}
699
db22d3cc 700/* Remove local variables that are not referenced in the IL. */
701
702void
703remove_unused_locals (void)
704{
705 basic_block bb;
b03e5397 706 tree var;
920bd157 707 unsigned srcidx, dstidx, num;
3c25489e 708 bool have_local_clobbers = false;
db22d3cc 709
fe5c69b7 710 /* Removing declarations from lexical blocks when not optimizing is
711 not only a waste of time, it actually causes differences in stack
712 layout. */
713 if (!optimize)
714 return;
715
4b366dd3 716 timevar_push (TV_REMOVE_UNUSED);
717
36267649 718 mark_scope_block_unused (DECL_INITIAL (current_function_decl));
75a70cf9 719
4ae5778c 720 usedvars = BITMAP_ALLOC (NULL);
db22d3cc 721
722 /* Walk the CFG marking all referenced symbols. */
fc00614f 723 FOR_EACH_BB_FN (bb, cfun)
db22d3cc 724 {
75a70cf9 725 gimple_stmt_iterator gsi;
726 size_t i;
32dedf8f 727 edge_iterator ei;
728 edge e;
db22d3cc 729
730 /* Walk the statements. */
75a70cf9 731 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
732 {
42acab1c 733 gimple *stmt = gsi_stmt (gsi);
75a70cf9 734 tree b = gimple_block (stmt);
735
9845d120 736 if (is_gimple_debug (stmt))
737 continue;
738
3c25489e 739 if (gimple_clobber_p (stmt))
740 {
741 have_local_clobbers = true;
742 continue;
743 }
744
75a70cf9 745 if (b)
746 TREE_USED (b) = true;
db22d3cc 747
75a70cf9 748 for (i = 0; i < gimple_num_ops (stmt); i++)
920bd157 749 mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
75a70cf9 750 }
751
1a91d914 752 for (gphi_iterator gpi = gsi_start_phis (bb);
753 !gsi_end_p (gpi);
754 gsi_next (&gpi))
db22d3cc 755 {
756 use_operand_p arg_p;
757 ssa_op_iter i;
75a70cf9 758 tree def;
1a91d914 759 gphi *phi = gpi.phi ();
db22d3cc 760
7c782c9b 761 if (virtual_operand_p (gimple_phi_result (phi)))
db22d3cc 762 continue;
763
75a70cf9 764 def = gimple_phi_result (phi);
920bd157 765 mark_all_vars_used (&def);
db22d3cc 766
767 FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
768 {
769 tree arg = USE_FROM_PTR (arg_p);
5169661d 770 int index = PHI_ARG_INDEX_FROM_USE (arg_p);
771 tree block =
772 LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
773 if (block != NULL)
774 TREE_USED (block) = true;
920bd157 775 mark_all_vars_used (&arg);
db22d3cc 776 }
777 }
32dedf8f 778
779 FOR_EACH_EDGE (e, ei, bb->succs)
f1ff4562 780 if (LOCATION_BLOCK (e->goto_locus) != NULL)
5169661d 781 TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
db22d3cc 782 }
783
3c25489e 784 /* We take a two-pass approach to the out-of-scope clobbers. We want
785 to remove them if they are the only references to a local variable,
786 but we want to retain them when there are any others. So the first pass
787 ignores them, and the second pass (if there were any) tries to remove
788 them. */
789 if (have_local_clobbers)
fc00614f 790 FOR_EACH_BB_FN (bb, cfun)
3c25489e 791 {
792 gimple_stmt_iterator gsi;
793
794 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
795 {
42acab1c 796 gimple *stmt = gsi_stmt (gsi);
3c25489e 797 tree b = gimple_block (stmt);
798
799 if (gimple_clobber_p (stmt))
800 {
801 tree lhs = gimple_assign_lhs (stmt);
9f559b20 802 tree base = get_base_address (lhs);
803 /* Remove clobbers referencing unused vars, or clobbers
804 with MEM_REF lhs referencing uninitialized pointers. */
805 if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
806 || (TREE_CODE (lhs) == MEM_REF
807 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
808 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
809 && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
810 != PARM_DECL)))
3c25489e 811 {
812 unlink_stmt_vdef (stmt);
813 gsi_remove (&gsi, true);
814 release_defs (stmt);
815 continue;
816 }
817 if (b)
818 TREE_USED (b) = true;
819 }
820 gsi_next (&gsi);
821 }
822 }
823
6c01cf92 824 if (cfun->has_simduid_loops)
825 {
826 struct loop *loop;
827 FOR_EACH_LOOP (loop, 0)
828 if (loop->simduid && !is_used_p (loop->simduid))
829 loop->simduid = NULL_TREE;
830 }
831
e15deb4b 832 cfun->has_local_explicit_reg_vars = false;
833
920bd157 834 /* Remove unmarked local and global vars from local_decls. */
f1f41a6c 835 num = vec_safe_length (cfun->local_decls);
501bdd19 836 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
db22d3cc 837 {
f1f41a6c 838 var = (*cfun->local_decls)[srcidx];
ad75582e 839 if (TREE_CODE (var) == VAR_DECL)
db22d3cc 840 {
920bd157 841 if (!is_used_p (var))
ad75582e 842 {
1ba198c0 843 tree def;
7843e4bc 844 if (cfun->nonlocal_goto_save_area
845 && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
846 cfun->nonlocal_goto_save_area = NULL;
1ba198c0 847 /* Release any default def associated with var. */
848 if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
849 {
850 set_ssa_default_def (cfun, var, NULL_TREE);
851 release_ssa_name (def);
852 }
ad75582e 853 continue;
a93b21ea 854 }
db22d3cc 855 }
ad75582e 856 if (TREE_CODE (var) == VAR_DECL
857 && DECL_HARD_REGISTER (var)
858 && !is_global_var (var))
e15deb4b 859 cfun->has_local_explicit_reg_vars = true;
2ab2ce89 860
501bdd19 861 if (srcidx != dstidx)
f1f41a6c 862 (*cfun->local_decls)[dstidx] = var;
501bdd19 863 dstidx++;
db22d3cc 864 }
501bdd19 865 if (dstidx != num)
560965e9 866 {
867 statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
868 cfun->local_decls->truncate (dstidx);
869 }
2887c015 870
a467a47b 871 remove_unused_scope_block_p (DECL_INITIAL (current_function_decl), false);
5169661d 872 clear_unused_block_pointer ();
2887c015 873
4ae5778c 874 BITMAP_FREE (usedvars);
fe15b701 875
36267649 876 if (dump_file && (dump_flags & TDF_DETAILS))
877 {
878 fprintf (dump_file, "Scope blocks after cleanups:\n");
cee43f7e 879 dump_scope_blocks (dump_file, dump_flags);
36267649 880 }
4b366dd3 881
882 timevar_pop (TV_REMOVE_UNUSED);
db22d3cc 883}
884
4ee9c684 885/* Allocate and return a new live range information object based on MAP. */
886
887static tree_live_info_p
888new_tree_live_info (var_map map)
889{
890 tree_live_info_p live;
4fb07d00 891 basic_block bb;
4ee9c684 892
4fb07d00 893 live = XNEW (struct tree_live_info_d);
4ee9c684 894 live->map = map;
fe672ac0 895 live->num_blocks = last_basic_block_for_fn (cfun);
4ee9c684 896
78775228 897 bitmap_obstack_initialize (&live->livein_obstack);
898 bitmap_obstack_initialize (&live->liveout_obstack);
fe672ac0 899 live->livein = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
fc00614f 900 FOR_EACH_BB_FN (bb, cfun)
78775228 901 bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);
4ee9c684 902
fe672ac0 903 live->liveout = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
fc00614f 904 FOR_EACH_BB_FN (bb, cfun)
78775228 905 bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);
30928c44 906
fe672ac0 907 live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
30928c44 908 live->stack_top = live->work_stack;
909
78775228 910 live->global = BITMAP_ALLOC (NULL);
4ee9c684 911 return live;
912}
913
914
915/* Free storage for live range info object LIVE. */
916
48e1416a 917void
4ee9c684 918delete_tree_live_info (tree_live_info_p live)
919{
78775228 920 if (live->livein)
921 {
922 bitmap_obstack_release (&live->livein_obstack);
923 free (live->livein);
924 }
925 if (live->liveout)
926 {
927 bitmap_obstack_release (&live->liveout_obstack);
928 free (live->liveout);
929 }
930 BITMAP_FREE (live->global);
30928c44 931 free (live->work_stack);
30928c44 932 free (live);
4ee9c684 933}
934
935
48e1416a 936/* Visit basic block BB and propagate any required live on entry bits from
937 LIVE into the predecessors. VISITED is the bitmap of visited blocks. */
4ee9c684 940
48e1416a 941static void
78775228 942loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
4ee9c684 943{
30928c44 944 edge e;
945 bool change;
946 edge_iterator ei;
947 basic_block pred_bb;
948 bitmap loe;
4ee9c684 949
688425e8 950 gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
08b7917c 951 bitmap_set_bit (visited, bb->index);
688425e8 952
30928c44 953 loe = live_on_entry (live, bb);
4ee9c684 954
30928c44 955 FOR_EACH_EDGE (e, ei, bb->preds)
4ee9c684 956 {
30928c44 957 pred_bb = e->src;
34154e27 958 if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
30928c44 959 continue;
78775228 960 /* Variables live-on-entry from BB that aren't defined in the
48e1416a 961 predecessor block. This should be the live on entry vars to pred.
30928c44 962 Note that liveout is the DEFs in a block while live on entry is
78775228 963 being calculated.
964 Add these bits to live-on-entry for the pred. If there are any
30928c44 965 changes and pred_bb has already been visited, add it to the
966 revisit stack. */
78775228 967 change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
968 loe, &live->liveout[pred_bb->index]);
969 if (change
970 && bitmap_bit_p (visited, pred_bb->index))
30928c44 971 {
08b7917c 972 bitmap_clear_bit (visited, pred_bb->index);
30928c44 973 *(live->stack_top)++ = pred_bb->index;
974 }
4ee9c684 975 }
976}
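/* Added editorial note: the propagation step above is, in set notation,

       live_on_entry (PRED) |= live_on_entry (BB) & ~liveout (PRED)

   for each predecessor PRED of BB, where liveout (PRED) temporarily holds
   the partitions defined in PRED while live-on-entry is being computed
   (see set_var_live_on_entry below).  */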
977
978
48e1416a 979/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
7920eed5 980 of all the variables. */
4ee9c684 981
30928c44 982static void
983live_worklist (tree_live_info_p live)
4ee9c684 984{
30928c44 985 unsigned b;
4ee9c684 986 basic_block bb;
fe672ac0 987 sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
4ee9c684 988
53c5d9d4 989 bitmap_clear (visited);
4ee9c684 990
80777cd8 991 /* Visit all the blocks in reverse order and propagate live on entry values
30928c44 992 into the predecessor blocks. */
7a46197b 993 FOR_EACH_BB_REVERSE_FN (bb, cfun)
78775228 994 loe_visit_block (live, bb, visited);
4ee9c684 995
30928c44 996 /* Process any blocks which require further iteration. */
997 while (live->stack_top != live->work_stack)
4ee9c684 998 {
30928c44 999 b = *--(live->stack_top);
78775228 1000 loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
30928c44 1001 }
4ee9c684 1002
30928c44 1003 sbitmap_free (visited);
1004}
4ee9c684 1005
4ee9c684 1006
7920eed5 1007/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
30928c44 1008 links. Set the live on entry fields in LIVE. Defs are marked temporarily
1009 in the liveout vector. */
4ee9c684 1010
30928c44 1011static void
1012set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
1013{
1014 int p;
42acab1c 1015 gimple *stmt;
30928c44 1016 use_operand_p use;
1017 basic_block def_bb = NULL;
1018 imm_use_iterator imm_iter;
1019 bool global = false;
4ee9c684 1020
30928c44 1021 p = var_to_partition (live->map, ssa_name);
1022 if (p == NO_PARTITION)
1023 return;
4ee9c684 1024
30928c44 1025 stmt = SSA_NAME_DEF_STMT (ssa_name);
1026 if (stmt)
4ee9c684 1027 {
75a70cf9 1028 def_bb = gimple_bb (stmt);
2d043327 1029 /* Mark defs in liveout bitmap temporarily. */
30928c44 1030 if (def_bb)
4fb07d00 1031 bitmap_set_bit (&live->liveout[def_bb->index], p);
0cc4271a 1032 }
30928c44 1033 else
34154e27 1034 def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
4ee9c684 1035
4d8d655b 1036 /* An undefined local variable does not need to be very alive. */
1037 if (ssa_undefined_value_p (ssa_name, false))
1038 return;
1039
30928c44 1040 /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
1041 add it to the list of live on entry blocks. */
1042 FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
4ee9c684 1043 {
42acab1c 1044 gimple *use_stmt = USE_STMT (use);
30928c44 1045 basic_block add_block = NULL;
4ee9c684 1046
75a70cf9 1047 if (gimple_code (use_stmt) == GIMPLE_PHI)
30928c44 1048 {
1049 /* Uses in PHIs are considered to be live at exit of the SRC block
1050 as this is where a copy would be inserted. Check to see if it is
1051 defined in that block, or whether it's live on entry. */
1052 int index = PHI_ARG_INDEX_FROM_USE (use);
1a91d914 1053 edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
34154e27 1054 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
4ee9c684 1055 {
30928c44 1056 if (e->src != def_bb)
1057 add_block = e->src;
4ee9c684 1058 }
30928c44 1059 }
9845d120 1060 else if (is_gimple_debug (use_stmt))
1061 continue;
30928c44 1062 else
1063 {
1064 /* If it's not defined in this block, it's live on entry. */
75a70cf9 1065 basic_block use_bb = gimple_bb (use_stmt);
30928c44 1066 if (use_bb != def_bb)
1067 add_block = use_bb;
48e1416a 1068 }
30928c44 1069
1070 /* If there was a live on entry use, set the bit. */
1071 if (add_block)
1072 {
1073 global = true;
4fb07d00 1074 bitmap_set_bit (&live->livein[add_block->index], p);
4ee9c684 1075 }
1076 }
4ee9c684 1077
30928c44 1078 /* If SSA_NAME is live on entry to at least one block, fill in all the live
1079 on entry blocks between the def and all the uses. */
1080 if (global)
1081 bitmap_set_bit (live->global, p);
4ee9c684 1082}
1083
1084
1085/* Calculate the live on exit vectors based on the entry info in LIVEINFO. */
1086
78775228 1087static void
4ee9c684 1088calculate_live_on_exit (tree_live_info_p liveinfo)
1089{
4ee9c684 1090 basic_block bb;
1091 edge e;
30928c44 1092 edge_iterator ei;
4ee9c684 1093
2d043327 1094 /* Live-on-entry calculations used the liveout vectors for defs; clear them. */
fc00614f 1095 FOR_EACH_BB_FN (bb, cfun)
4fb07d00 1096 bitmap_clear (&liveinfo->liveout[bb->index]);
4ee9c684 1097
1098 /* Set all the live-on-exit bits for uses in PHIs. */
fc00614f 1099 FOR_EACH_BB_FN (bb, cfun)
4ee9c684 1100 {
1a91d914 1101 gphi_iterator gsi;
75a70cf9 1102 size_t i;
1103
30928c44 1104 /* Mark the PHI arguments which are live on exit to the pred block. */
75a70cf9 1105 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1106 {
1a91d914 1107 gphi *phi = gsi.phi ();
75a70cf9 1108 for (i = 0; i < gimple_phi_num_args (phi); i++)
48e1416a 1109 {
75a70cf9 1110 tree t = PHI_ARG_DEF (phi, i);
1111 int p;
1112
1113 if (TREE_CODE (t) != SSA_NAME)
1114 continue;
1115
1116 p = var_to_partition (liveinfo->map, t);
1117 if (p == NO_PARTITION)
1118 continue;
1119 e = gimple_phi_arg_edge (phi, i);
34154e27 1120 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
4fb07d00 1121 bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
75a70cf9 1122 }
1123 }
30928c44 1124
2d043327 1125 /* Add each successor's live on entry to this block's live on exit. */
30928c44 1126 FOR_EACH_EDGE (e, ei, bb->succs)
34154e27 1127 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
4fb07d00 1128 bitmap_ior_into (&liveinfo->liveout[bb->index],
30928c44 1129 live_on_entry (liveinfo, e->dest));
4ee9c684 1130 }
30928c44 1131}
1132
2d043327 1133
48e1416a 1134/* Given partition map MAP, calculate all the live on entry bitmaps for
30928c44 1135 each partition. Return a new live info object. */
1136
48e1416a 1137tree_live_info_p
78775228 1138calculate_live_ranges (var_map map, bool want_livein)
30928c44 1139{
1140 tree var;
1141 unsigned i;
1142 tree_live_info_p live;
4ee9c684 1143
30928c44 1144 live = new_tree_live_info (map);
4ee9c684 1145 for (i = 0; i < num_var_partitions (map); i++)
1146 {
30928c44 1147 var = partition_to_var (map, i);
1148 if (var != NULL_TREE)
1149 set_var_live_on_entry (var, live);
4ee9c684 1150 }
1151
30928c44 1152 live_worklist (live);
1153
382ecba7 1154 if (flag_checking)
1155 verify_live_on_entry (live);
30928c44 1156
1157 calculate_live_on_exit (live);
78775228 1158
1159 if (!want_livein)
1160 {
1161 bitmap_obstack_release (&live->livein_obstack);
1162 free (live->livein);
1163 live->livein = NULL;
1164 }
1165
30928c44 1166 return live;
4ee9c684 1167}
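/* Added editorial sketch (not part of the original file): how a consumer
   might drive the liveness API above.  MAP is assumed to be a var_map
   that has already been populated and compacted.

       tree_live_info_p live = calculate_live_ranges (map, true);
       basic_block bb;
       FOR_EACH_BB_FN (bb, cfun)
         {
           bitmap in  = live_on_entry (live, bb);
           bitmap out = live_on_exit (live, bb);
           ...  query partitions by index via partition_to_var ...
         }
       delete_tree_live_info (live);
*/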
1168
1169
4ee9c684 1170/* Output partition map MAP to file F. */
1171
1172void
1173dump_var_map (FILE *f, var_map map)
1174{
1175 int t;
1176 unsigned x, y;
1177 int p;
1178
1179 fprintf (f, "\nPartition map \n\n");
1180
1181 for (x = 0; x < map->num_partitions; x++)
1182 {
2d043327 1183 if (map->view_to_partition != NULL)
1184 p = map->view_to_partition[x];
4ee9c684 1185 else
1186 p = x;
1187
ade7d11b 1188 if (ssa_name (p) == NULL_TREE
1189 || virtual_operand_p (ssa_name (p)))
4ee9c684 1190 continue;
1191
1192 t = 0;
c211d998 1193 for (y = 1; y < num_ssa_names; y++)
4ee9c684 1194 {
1195 p = partition_find (map->var_partition, y);
2d043327 1196 if (map->partition_to_view)
1197 p = map->partition_to_view[p];
4ee9c684 1198 if (p == (int)x)
1199 {
1200 if (t++ == 0)
1201 {
9af5ce0c 1202 fprintf (f, "Partition %d (", x);
4ee9c684 1203 print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
1204 fprintf (f, " - ");
1205 }
1206 fprintf (f, "%d ", y);
1207 }
1208 }
1209 if (t != 0)
1210 fprintf (f, ")\n");
1211 }
1212 fprintf (f, "\n");
1213}
1214
1215
c7d89805 1216/* Generic dump for the above. */
1217
1218DEBUG_FUNCTION void
1219debug (_var_map &ref)
1220{
1221 dump_var_map (stderr, &ref);
1222}
1223
1224DEBUG_FUNCTION void
1225debug (_var_map *ptr)
1226{
1227 if (ptr)
1228 debug (*ptr);
1229 else
1230 fprintf (stderr, "<nil>\n");
1231}
1232
1233
4ee9c684 1234/* Output live range info LIVE to file F, controlled by FLAG. */
1235
1236void
1237dump_live_info (FILE *f, tree_live_info_p live, int flag)
1238{
1239 basic_block bb;
4f917ffe 1240 unsigned i;
4ee9c684 1241 var_map map = live->map;
0cc4271a 1242 bitmap_iterator bi;
4ee9c684 1243
1244 if ((flag & LIVEDUMP_ENTRY) && live->livein)
1245 {
fc00614f 1246 FOR_EACH_BB_FN (bb, cfun)
4ee9c684 1247 {
1248 fprintf (f, "\nLive on entry to BB%d : ", bb->index);
4fb07d00 1249 EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
4ee9c684 1250 {
30928c44 1251 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1252 fprintf (f, " ");
4ee9c684 1253 }
1254 fprintf (f, "\n");
1255 }
1256 }
1257
1258 if ((flag & LIVEDUMP_EXIT) && live->liveout)
1259 {
fc00614f 1260 FOR_EACH_BB_FN (bb, cfun)
4ee9c684 1261 {
1262 fprintf (f, "\nLive on exit from BB%d : ", bb->index);
4fb07d00 1263 EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
4ee9c684 1264 {
1265 print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
1266 fprintf (f, " ");
0cc4271a 1267 }
4ee9c684 1268 fprintf (f, "\n");
1269 }
1270 }
1271}
8c0963c4 1272
c7d89805 1273
1274/* Generic dump for the above. */
1275
1276DEBUG_FUNCTION void
1277debug (tree_live_info_d &ref)
1278{
1279 dump_live_info (stderr, &ref, 0);
1280}
1281
1282DEBUG_FUNCTION void
1283debug (tree_live_info_d *ptr)
1284{
1285 if (ptr)
1286 debug (*ptr);
1287 else
1288 fprintf (stderr, "<nil>\n");
1289}
1290
1291
2d043327 1292/* Verify that SSA_VAR is a non-virtual SSA_NAME. */
1293
8c0963c4 1294void
1295register_ssa_partition_check (tree ssa_var)
1296{
1297 gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
7c782c9b 1298 if (virtual_operand_p (ssa_var))
8c0963c4 1299 {
1300 fprintf (stderr, "Illegally registering a virtual SSA name :");
1301 print_generic_expr (stderr, ssa_var, TDF_SLIM);
1302 fprintf (stderr, " in the SSA->Normal phase.\n");
1303 internal_error ("SSA corruption");
1304 }
1305}
30928c44 1306
1307
1308/* Verify that the info in LIVE matches the current cfg. */
2d043327 1309
30928c44 1310static void
1311verify_live_on_entry (tree_live_info_p live)
1312{
1313 unsigned i;
1314 tree var;
42acab1c 1315 gimple *stmt;
30928c44 1316 basic_block bb;
1317 edge e;
1318 int num;
1319 edge_iterator ei;
1320 var_map map = live->map;
1321
1322 /* Check for live on entry partitions and report those with a DEF in
1323 the program. This will typically mean an optimization has done
1324 something wrong. */
34154e27 1325 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
30928c44 1326 num = 0;
1327 FOR_EACH_EDGE (e, ei, bb->succs)
1328 {
1329 int entry_block = e->dest->index;
34154e27 1330 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
30928c44 1331 continue;
1332 for (i = 0; i < (unsigned)num_var_partitions (map); i++)
1333 {
1334 basic_block tmp;
ec11736b 1335 tree d = NULL_TREE;
30928c44 1336 bitmap loe;
1337 var = partition_to_var (map, i);
1338 stmt = SSA_NAME_DEF_STMT (var);
75a70cf9 1339 tmp = gimple_bb (stmt);
ec11736b 1340 if (SSA_NAME_VAR (var))
1341 d = ssa_default_def (cfun, SSA_NAME_VAR (var));
30928c44 1342
1343 loe = live_on_entry (live, e->dest);
1344 if (loe && bitmap_bit_p (loe, i))
1345 {
75a70cf9 1346 if (!gimple_nop_p (stmt))
30928c44 1347 {
1348 num++;
1349 print_generic_expr (stderr, var, TDF_SLIM);
1350 fprintf (stderr, " is defined ");
1351 if (tmp)
1352 fprintf (stderr, " in BB%d, ", tmp->index);
1353 fprintf (stderr, "by:\n");
75a70cf9 1354 print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
48e1416a 1355 fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
30928c44 1356 entry_block);
1357 fprintf (stderr, " So it appears to have multiple defs.\n");
1358 }
1359 else
1360 {
1361 if (d != var)
1362 {
1363 num++;
1364 print_generic_expr (stderr, var, TDF_SLIM);
75a70cf9 1365 fprintf (stderr, " is live-on-entry to BB%d ",
1366 entry_block);
30928c44 1367 if (d)
1368 {
1369 fprintf (stderr, " but is not the default def of ");
1370 print_generic_expr (stderr, d, TDF_SLIM);
1371 fprintf (stderr, "\n");
1372 }
1373 else
1374 fprintf (stderr, " and there is no default def.\n");
1375 }
1376 }
1377 }
1378 else
1379 if (d == var)
1380 {
4d8d655b 1381 /* An undefined local variable does not need to be very
1382 alive. */
1383 if (ssa_undefined_value_p (var, false))
1384 continue;
1385
48e1416a 1386 /* The only way this var shouldn't be marked live on entry is
30928c44 1387 if it occurs in a PHI argument of the block. */
75a70cf9 1388 size_t z;
1389 bool ok = false;
1a91d914 1390 gphi_iterator gsi;
75a70cf9 1391 for (gsi = gsi_start_phis (e->dest);
1392 !gsi_end_p (gsi) && !ok;
1393 gsi_next (&gsi))
30928c44 1394 {
1a91d914 1395 gphi *phi = gsi.phi ();
75a70cf9 1396 for (z = 0; z < gimple_phi_num_args (phi); z++)
1397 if (var == gimple_phi_arg_def (phi, z))
30928c44 1398 {
75a70cf9 1399 ok = true;
30928c44 1400 break;
1401 }
1402 }
1403 if (ok)
1404 continue;
b2df3bbf 1405 /* Expand adds unused default defs for PARM_DECLs and
1406 RESULT_DECLs. They're ok. */
1407 if (has_zero_uses (var)
1408 && SSA_NAME_VAR (var)
1409 && !VAR_P (SSA_NAME_VAR (var)))
1410 continue;
30928c44 1411 num++;
1412 print_generic_expr (stderr, var, TDF_SLIM);
48e1416a 1413 fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
30928c44 1414 entry_block);
1415 fprintf (stderr, "but it is a default def so it should be.\n");
1416 }
1417 }
1418 }
1419 gcc_assert (num <= 0);
1420}