/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements which have no
   impact on the program's output: "dead statements" have no effect
   on the output, while "necessary statements" may.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
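
/* As an illustrative sketch (hypothetical code, not a GCC testcase),
   the three phases act on a function like this:

     int f (int a, int b)
     {
       int dead = a * b;  // never used: stays unmarked, removed in phase 3
       int live = a + b;  // feeds the return value: marked in phase 2
       return live;       // obviously necessary: marked in phase 1
     }
*/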

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "calls.h"
#include "gimple-pretty-print.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-chkp.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
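
/* For illustration (a hypothetical diamond CFG, not derived from any
   particular testcase): B2 is control dependent on the edge B1->B2 and
   B3 on the edge B1->B3, while B4 post-dominates B1 and so is control
   dependent on neither edge.

        B1 (cond)
       /         \
      B2         B3
       \         /
        B4
*/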

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;


/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}

/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}

/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            case BUILT_IN_ALLOCA:
            case BUILT_IN_ALLOCA_WITH_ALIGN:
              return;

            default:;
            }
        /* Most, but not all, function calls are required.  Function calls
           that produce no result and have no side effects (i.e. const and
           pure functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (!gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
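
/* A rough sketch (illustrative pseudocode, not from any testcase) of how
   the classification above plays out:

     printf ("x");      // has side effects: marked necessary
     t = pure_fn (a);   // pure call with used lhs: left for propagation
     p = malloc (n);    // allocation: deliberately left unmarked
     *q = 1;            // may clobber global memory: marked necessary
     x = {CLOBBER};     // clobber: never obviously necessary
*/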

/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge (edge_number)->src;

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}

/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise they are left to control dependence analysis.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (aggressive)
    {
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
      scev_finalize ();
    }
}
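
/* For example (an illustrative sketch, not a GCC testcase), the latch of
   the loop below is marked because finite_loop_p cannot prove termination,
   so aggressive DCE keeps the possibly infinite loop even though it
   computes nothing that is used:

     void f (unsigned n)
     {
       while (n != 1)
         n = (n & 1) ? 3 * n + 1 : n / 2;  // finiteness unknown
     }
*/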

/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (size != -1 && size == max_size
              && ref->max_size != -1)
            {
              if (offset <= ref->offset
                  && offset + size >= ref->offset + ref->max_size)
                return true;
            }
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}
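
/* A small illustration of the kill test above (hypothetical C, with a
   being a non-aliased local):

     a = 1;   // never reached by the walk: the store below kills it
     a = 2;   // same base, extent covers the whole load: a kill
     x = a;   // the load whose reaching definitions are walked

   Starting from the VUSE of the load, the walk marks "a = 2" necessary
   and stops there, so "a = 1" can remain dead.  */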

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          case BUILT_IN_ALLOCA:
          case BUILT_IN_ALLOCA_WITH_ALIGN:
          case BUILT_IN_FREE:
            return false;

          default:;
          }
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one argument or with all identical
   arguments; such PHIs can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
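
/* E.g. (illustrative)  x_3 = PHI <a_1, a_1>  is degenerate and its uses
   can simply use a_1, while  x_3 = PHI <a_1, b_2>  is not.  */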

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependences
   are ignored.  */

static void
propagate_necessity (bool aggressive)
{
  gimple stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statements of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             in the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark control dependence of the empty basic blocks,
             since they contain the computation of the PHI operands.

             Doing so is too restrictive in the case the predecessor block
             is in a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for j in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (that is within the empty loop) also contains the block
             determining the number of iterations of the loop, which would
             prevent removal of the empty loop here.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how the
             control dependence would look if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases how the
             children of C' can look:

             1) C' is a leaf

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as the
                control dependence of B in the original CFG except for block
                B itself (since C' postdominates B in the modified CFG).

             Now how do we decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor of
                B that is not postdominated by B.  Testing this condition is
                somewhat expensive, because we need to iterate over all
                successors of B.  We are safe to assume that this does not
                happen: we will mark B as needed when processing the other
                path from B to C that is control dependent on B, and marking
                control dependencies of B itself is harmless because they
                will be processed anyway after processing the control
                statement in B.

             b) C does not postdominate B.  Then case 1 always happens since
                there is a path from C to exit that does not go through B
                and thus also not through C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function, do not mark it necessary through
             processing the argument.  */
          if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                {
                  gimple bounds_def_stmt;
                  tree bounds;

                  /* For instrumented calls we should also check that the
                     used bounds are returned by the same allocation call.  */
                  if (!gimple_call_with_bounds_p (stmt)
                      || ((bounds = gimple_call_arg (stmt, 1))
                          && TREE_CODE (bounds) == SSA_NAME
                          && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
                          && chkp_gimple_call_builtin_p (bounds_def_stmt,
                                                         BUILT_IN_CHKP_BNDRET)
                          && gimple_call_arg (bounds_def_stmt, 0) == ptr))
                    continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, but skip non-aliased ones, which we
             handle in 1).  By keeping a global visited bitmap for the
             references we walk for 2) we avoid quadratic behavior for
             those.  */

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
                      || (DECL_FUNCTION_CODE (callee)
                          == BUILT_IN_ALLOCA_WITH_ALIGN)
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget, drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back
             to super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gphi_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
             e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
         dependent BB.  This means that we can pick any edge to duplicate
         PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
        if (e2 != e)
          break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
        {
          gphi *phi = gsi.phi ();
          tree op;
          source_location locus;

          /* PHIs for virtuals have no control dependency relation on them.
             We are lost here and must force renaming of the symbol.  */
          if (virtual_operand_p (gimple_phi_result (phi)))
            {
              mark_virtual_phi_result_for_renaming (phi);
              remove_phi_node (&gsi, true);
              continue;
            }

          /* Dead PHIs do not imply control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
            {
              gsi_next (&gsi);
              continue;
            }

          op = gimple_phi_arg_def (phi, e2->dest_idx);
          locus = gimple_phi_arg_location (phi, e2->dest_idx);
          add_phi_arg (phi, op, e, locus);
          /* If not dead, the resulting PHI can only be degenerate.  */
          gcc_assert (degenerate_phi_p (phi));
          gsi_next (&gsi);
        }
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block
   BB containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
         does not exist or is the exit block.  These can happen for infinite
         loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
        if (e != e2)
          {
            cfg_altered = true;
            /* If we made a BB unconditionally exit a loop then this
               transform alters the set of BBs in the loop.  Schedule
               a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e))
              loops_state_set (LOOPS_NEED_FIXUP);
            remove_edge (e2);
          }
        else
          ei_next (&ei);
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}
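
/* E.g. (illustrative): if the dead statement is a store  i = 5;  to an
   otherwise unused user variable i, a  # DEBUG i => 5  bind is emitted
   in its place so debuggers can still report the value.  */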

/* Helper for maybe_optimize_arith_overflow.  Find in *TP whether there are
   any uses of DATA (an SSA_NAME) other than through a REALPART_EXPR
   referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}

/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
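
/* An illustrative sketch of the transformation (GIMPLE-like pseudocode,
   assuming a signed int operation whose overflow flag is never inspected):

     _1 = ADD_OVERFLOW (a_2, b_3);
     r_4 = REALPART_EXPR <_1>;      // only the real part is used

   becomes, roughly,

     _1 = COMPLEX_EXPR <(int) ((unsigned int) a_2 + (unsigned int) b_3), 0>;
     r_4 = REALPART_EXPR <_1>;

   after which later folding can clean up the REALPART_EXPR.  */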

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  vec<basic_block> h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
              /* We did not propagate necessity for free calls fed by
                 an allocation function, to allow elimination of
                 unnecessary alloc-free sequences.  For instrumented
                 calls this also means we did not mark the bounds
                 producer as necessary, so do it now in case the free
                 call is not removed.  */
              if (gimple_call_with_bounds_p (stmt))
                {
                  gimple bounds_def_stmt;
                  tree bounds = gimple_call_arg (stmt, 1);
                  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
                  bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
                  if (bounds_def_stmt
                      && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
                    gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
                                    gimple_plf (stmt, STMT_NECESSARY));
                }
            }

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep clobbers that we can keep live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed,
                                            SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    continue;
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb);
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When the LHS of var = call (); is dead, simplify it into
                 call (); saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary; it would confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                      || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                          && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
                          && (DECL_FUNCTION_CODE (call)
                              != BUILT_IN_ALLOCA_WITH_ALIGN)))
                  /* Avoid doing so for bndret calls for the same reason.  */
                  && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE without lhs is not needed.  */
                  if (gimple_call_internal_p (stmt)
                      && gimple_call_internal_fn (stmt) == IFN_GOMP_SIMD_LANE)
                    remove_dead_stmt (&gsi, bb);
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
        }
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that
     we rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!MAY_HAVE_DEBUG_STMTS
                      || !first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominator tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
                         | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences (create_edge_list ());

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates ();
      if (scev_initialized_p ())
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}