1 /* Dead code elimination pass for the GNU compiler.
2 Copyright (C) 2002-2017 Free Software Foundation, Inc.
3 Contributed by Ben Elliston <bje@redhat.com>
4 and Andrew MacLeod <amacleod@redhat.com>
5 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* Dead code elimination.
24
25 References:
26
27 Building an Optimizing Compiler,
28 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
29
30 Advanced Compiler Design and Implementation,
31 Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
32
33 Dead-code elimination is the removal of statements which have no
34    impact on the program's output.  "Dead statements" have no effect
35    on the output, while "necessary statements" may affect it and must
36    therefore be preserved.
37
38 The algorithm consists of three phases:
39 1. Marking as necessary all statements known to be necessary,
40 e.g. most function calls, writing a value to memory, etc;
41 2. Propagating necessary statements, e.g., the statements
42 giving values to operands in necessary statements; and
43 3. Removing dead statements. */
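/* As an illustration only (the example below is invented for this comment,
   not taken from a testcase), consider the GIMPLE sequence

       a_1 = b_2 + c_3;
       d_4 = b_2 * 2;
       foo (d_4);

   Phase 1 marks the call to foo as necessary, phase 2 propagates that
   necessity to the statement defining d_4, and phase 3 removes the
   statement defining a_1, which nothing marked necessary.  */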
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "backend.h"
49 #include "rtl.h"
50 #include "tree.h"
51 #include "gimple.h"
52 #include "cfghooks.h"
53 #include "tree-pass.h"
54 #include "ssa.h"
55 #include "gimple-pretty-print.h"
56 #include "fold-const.h"
57 #include "calls.h"
58 #include "cfganal.h"
59 #include "tree-eh.h"
60 #include "gimplify.h"
61 #include "gimple-iterator.h"
62 #include "tree-cfg.h"
63 #include "tree-ssa-loop-niter.h"
64 #include "tree-into-ssa.h"
65 #include "tree-dfa.h"
66 #include "cfgloop.h"
67 #include "tree-scalar-evolution.h"
68 #include "tree-chkp.h"
69 #include "tree-ssa-propagate.h"
70 #include "gimple-fold.h"
71
72 static struct stmt_stats
73 {
74 int total;
75 int total_phis;
76 int removed;
77 int removed_phis;
78 } stats;
79
80 #define STMT_NECESSARY GF_PLF_1
81
82 static vec<gimple *> worklist;
83
84 /* Vector indicating an SSA name has already been processed and marked
85 as necessary. */
86 static sbitmap processed;
87
88 /* Vector indicating that the last statement of a basic block has already
89 been marked as necessary. */
90 static sbitmap last_stmt_necessary;
91
92 /* Vector indicating that BB contains statements that are live. */
93 static sbitmap bb_contains_live_stmts;
94
95 /* Before we can determine whether a control branch is dead, we need to
96 compute which blocks are control dependent on which edges.
97
98 We expect each block to be control dependent on very few edges so we
99 use a bitmap for each block recording its edges. An array holds the
100 bitmap. The Ith bit in the bitmap is set if that block is dependent
101 on the Ith edge. */
102 static control_dependences *cd;
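/* For instance (a hypothetical sketch, not taken from a particular dump): in

       if (p_1 != 0)
         x_2 = 1;
       y_3 = 2;

   the block assigning x_2 is control dependent on the true edge of the
   condition, while the block assigning y_3 executes regardless of p_1 and
   is therefore not control dependent on either edge.  */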
103
104 /* Vector indicating that a basic block has already had all the edges
105 processed that it is control dependent on. */
106 static sbitmap visited_control_parents;
107
108 /* TRUE if this pass alters the CFG (by removing control statements).
109 FALSE otherwise.
110
111 If this pass alters the CFG, then it will arrange for the dominators
112 to be recomputed. */
113 static bool cfg_altered;
114
115 /* When non-NULL holds map from basic block index into the postorder. */
116 static int *bb_postorder;
117
118
119 /* If STMT is not already marked necessary, mark it, and add it to the
120 worklist if ADD_TO_WORKLIST is true. */
121
122 static inline void
123 mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
124 {
125 gcc_assert (stmt);
126
127 if (gimple_plf (stmt, STMT_NECESSARY))
128 return;
129
130 if (dump_file && (dump_flags & TDF_DETAILS))
131 {
132 fprintf (dump_file, "Marking useful stmt: ");
133 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
134 fprintf (dump_file, "\n");
135 }
136
137 gimple_set_plf (stmt, STMT_NECESSARY, true);
138 if (add_to_worklist)
139 worklist.safe_push (stmt);
140 if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
141 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
142 }
143
144
145 /* Mark the statement defining operand OP as necessary. */
146
147 static inline void
148 mark_operand_necessary (tree op)
149 {
150 gimple *stmt;
151 int ver;
152
153 gcc_assert (op);
154
155 ver = SSA_NAME_VERSION (op);
156 if (bitmap_bit_p (processed, ver))
157 {
158 stmt = SSA_NAME_DEF_STMT (op);
159 gcc_assert (gimple_nop_p (stmt)
160 || gimple_plf (stmt, STMT_NECESSARY));
161 return;
162 }
163 bitmap_set_bit (processed, ver);
164
165 stmt = SSA_NAME_DEF_STMT (op);
166 gcc_assert (stmt);
167
168 if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
169 return;
170
171 if (dump_file && (dump_flags & TDF_DETAILS))
172 {
173 fprintf (dump_file, "marking necessary through ");
174 print_generic_expr (dump_file, op);
175 fprintf (dump_file, " stmt ");
176 print_gimple_stmt (dump_file, stmt, 0);
177 }
178
179 gimple_set_plf (stmt, STMT_NECESSARY, true);
180 if (bb_contains_live_stmts)
181 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
182 worklist.safe_push (stmt);
183 }
184
185
186 /* Mark STMT as necessary if it obviously is. Add it to the worklist if
187 it can make other statements necessary.
188
189 If AGGRESSIVE is false, control statements are conservatively marked as
190 necessary. */
191
192 static void
193 mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
194 {
195 /* With non-call exceptions, we have to assume that all statements could
196 throw. If a statement could throw, it can be deemed necessary. */
197 if (cfun->can_throw_non_call_exceptions
198 && !cfun->can_delete_dead_exceptions
199 && stmt_could_throw_p (stmt))
200 {
201 mark_stmt_necessary (stmt, true);
202 return;
203 }
204
205 /* Statements that are implicitly live. Most function calls, asm
206 and return statements are required. Labels and GIMPLE_BIND nodes
207 are kept because they are control flow, and we have no way of
208 knowing whether they can be removed. DCE can eliminate all the
209 other statements in a block, and CFG can then remove the block
210 and labels. */
211 switch (gimple_code (stmt))
212 {
213 case GIMPLE_PREDICT:
214 case GIMPLE_LABEL:
215 mark_stmt_necessary (stmt, false);
216 return;
217
218 case GIMPLE_ASM:
219 case GIMPLE_RESX:
220 case GIMPLE_RETURN:
221 mark_stmt_necessary (stmt, true);
222 return;
223
224 case GIMPLE_CALL:
225 {
226 tree callee = gimple_call_fndecl (stmt);
227 if (callee != NULL_TREE
228 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
229 switch (DECL_FUNCTION_CODE (callee))
230 {
231 case BUILT_IN_MALLOC:
232 case BUILT_IN_ALIGNED_ALLOC:
233 case BUILT_IN_CALLOC:
234 CASE_BUILT_IN_ALLOCA:
235 case BUILT_IN_STRDUP:
236 case BUILT_IN_STRNDUP:
237 return;
238
239 default:;
240 }
241 	/* Most, but not all, function calls are required.  Function calls that
242 	   produce no result and have no side effects (i.e. const and pure
243 	   functions) are unnecessary.  */
244 if (gimple_has_side_effects (stmt))
245 {
246 mark_stmt_necessary (stmt, true);
247 return;
248 }
249 if (!gimple_call_lhs (stmt))
250 return;
251 break;
252 }
253
254 case GIMPLE_DEBUG:
255 /* Debug temps without a value are not useful. ??? If we could
256 easily locate the debug temp bind stmt for a use thereof,
258 	 we could refrain from marking all debug temps here, and
258 mark them only if they're used. */
259 if (gimple_debug_nonbind_marker_p (stmt)
260 || !gimple_debug_bind_p (stmt)
261 || gimple_debug_bind_has_value_p (stmt)
262 || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
263 mark_stmt_necessary (stmt, false);
264 return;
265
266 case GIMPLE_GOTO:
267 gcc_assert (!simple_goto_p (stmt));
268 mark_stmt_necessary (stmt, true);
269 return;
270
271 case GIMPLE_COND:
272 gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
273 /* Fall through. */
274
275 case GIMPLE_SWITCH:
276 if (! aggressive)
277 mark_stmt_necessary (stmt, true);
278 break;
279
280 case GIMPLE_ASSIGN:
281 if (gimple_clobber_p (stmt))
282 return;
283 break;
284
285 default:
286 break;
287 }
288
289 /* If the statement has volatile operands, it needs to be preserved.
290 Same for statements that can alter control flow in unpredictable
291 ways. */
292 if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
293 {
294 mark_stmt_necessary (stmt, true);
295 return;
296 }
297
298 if (stmt_may_clobber_global_p (stmt))
299 {
300 mark_stmt_necessary (stmt, true);
301 return;
302 }
303
304 return;
305 }
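/* To sketch the effect of the function above (examples made up): an asm, a
   resx or a return statement is marked necessary and queued; a label is
   marked but not queued; a call to malloc is deliberately left unmarked so
   that an unused malloc/free pair can be eliminated; and a plain
   "a_1 = b_2 + 1" stays unmarked here and only becomes necessary if a
   statement using a_1 is marked later.  */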
306
307
308 /* Mark the last statement of BB as necessary. */
309
310 static void
311 mark_last_stmt_necessary (basic_block bb)
312 {
313 gimple *stmt = last_stmt (bb);
314
315 bitmap_set_bit (last_stmt_necessary, bb->index);
316 bitmap_set_bit (bb_contains_live_stmts, bb->index);
317
318 /* We actually mark the statement only if it is a control statement. */
319 if (stmt && is_ctrl_stmt (stmt))
320 mark_stmt_necessary (stmt, true);
321 }
322
323
324 /* Mark control dependent edges of BB as necessary. We have to do this only
325 once for each basic block so we set the appropriate bit after we're done.
326
327 When IGNORE_SELF is true, ignore BB in the list of control dependences. */
328
329 static void
330 mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
331 {
332 bitmap_iterator bi;
333 unsigned edge_number;
334 bool skipped = false;
335
336 gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
337
338 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
339 return;
340
341 EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
342 0, edge_number, bi)
343 {
344 basic_block cd_bb = cd->get_edge_src (edge_number);
345
346 if (ignore_self && cd_bb == bb)
347 {
348 skipped = true;
349 continue;
350 }
351
352 if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
353 mark_last_stmt_necessary (cd_bb);
354 }
355
356 if (!skipped)
357 bitmap_set_bit (visited_control_parents, bb->index);
358 }
359
360
361 /* Find obviously necessary statements. These are things like most function
362 calls, and stores to file level variables.
363
364    If AGGRESSIVE is false, control statements are conservatively marked as
365    necessary.  Otherwise control statements are only marked as necessary
366    when needed, e.g. to preserve possibly infinite loops.  */
367
368 static void
369 find_obviously_necessary_stmts (bool aggressive)
370 {
371 basic_block bb;
372 gimple_stmt_iterator gsi;
373 edge e;
374 gimple *phi, *stmt;
375 int flags;
376
377 FOR_EACH_BB_FN (bb, cfun)
378 {
379 /* PHI nodes are never inherently necessary. */
380 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
381 {
382 phi = gsi_stmt (gsi);
383 gimple_set_plf (phi, STMT_NECESSARY, false);
384 }
385
386 /* Check all statements in the block. */
387 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
388 {
389 stmt = gsi_stmt (gsi);
390 gimple_set_plf (stmt, STMT_NECESSARY, false);
391 mark_stmt_if_obviously_necessary (stmt, aggressive);
392 }
393 }
394
395 /* Pure and const functions are finite and thus have no infinite loops in
396 them. */
397 flags = flags_from_decl_or_type (current_function_decl);
398 if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
399 return;
400
401   /* Prevent empty, possibly infinite, loops from being removed.  */
402 if (aggressive)
403 {
404 struct loop *loop;
405 if (mark_irreducible_loops ())
406 FOR_EACH_BB_FN (bb, cfun)
407 {
408 edge_iterator ei;
409 FOR_EACH_EDGE (e, ei, bb->succs)
410 if ((e->flags & EDGE_DFS_BACK)
411 && (e->flags & EDGE_IRREDUCIBLE_LOOP))
412 {
413 if (dump_file)
414 fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
415 e->src->index, e->dest->index);
416 mark_control_dependent_edges_necessary (e->dest, false);
417 }
418 }
419
420 FOR_EACH_LOOP (loop, 0)
421 if (!finite_loop_p (loop))
422 {
423 if (dump_file)
424 fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
425 mark_control_dependent_edges_necessary (loop->latch, false);
426 }
427 }
428 }
429
430
431 /* Return true if REF is based on an aliased base, otherwise false. */
432
433 static bool
434 ref_may_be_aliased (tree ref)
435 {
436 gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
437 while (handled_component_p (ref))
438 ref = TREE_OPERAND (ref, 0);
439 if (TREE_CODE (ref) == MEM_REF
440 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
441 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
442 return !(DECL_P (ref)
443 && !may_be_aliased (ref));
444 }
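/* Example (hypothetical): for a reference such as a.b[i_1] where "a" is a
   local whose address is never taken, the loop above strips the component
   references down to the decl "a" and the function returns false; for an
   indirect reference like *p_1 or an access to a public global it returns
   true, because stores through other names may reach it.  */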
445
446 static bitmap visited = NULL;
447 static unsigned int longest_chain = 0;
448 static unsigned int total_chain = 0;
449 static unsigned int nr_walks = 0;
450 static bool chain_ovfl = false;
451
452 /* Worker for the walker that marks reaching definitions of REF,
453 which is based on a non-aliased decl, necessary. It returns
454 true whenever the defining statement of the current VDEF is
455 a kill for REF, as no dominating may-defs are necessary for REF
456 anymore. DATA points to the basic-block that contains the
457 stmt that refers to REF. */
458
459 static bool
460 mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
461 {
462 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
463
464 /* All stmts we visit are necessary. */
465 if (! gimple_clobber_p (def_stmt))
466 mark_operand_necessary (vdef);
467
468 /* If the stmt lhs kills ref, then we can stop walking. */
469 if (gimple_has_lhs (def_stmt)
470 && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
471 /* The assignment is not necessarily carried out if it can throw
472 and we can catch it in the current function where we could inspect
473 the previous value.
474 ??? We only need to care about the RHS throwing. For aggregate
475 assignments or similar calls and non-call exceptions the LHS
476 might throw as well. */
477 && !stmt_can_throw_internal (def_stmt))
478 {
479 tree base, lhs = gimple_get_lhs (def_stmt);
480 HOST_WIDE_INT size, offset, max_size;
481 bool reverse;
482 ao_ref_base (ref);
483 base
484 = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
485 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
486 so base == refd->base does not always hold. */
487 if (base == ref->base)
488 {
489 /* For a must-alias check we need to be able to constrain
490 the accesses properly. */
491 if (size != -1 && size == max_size
492 && ref->max_size != -1)
493 {
494 if (offset <= ref->offset
495 && offset + size >= ref->offset + ref->max_size)
496 return true;
497 }
498 /* Or they need to be exactly the same. */
499 else if (ref->ref
500 /* Make sure there is no induction variable involved
501 in the references (gcc.c-torture/execute/pr42142.c).
502 The simplest way is to check if the kill dominates
503 the use. */
504 /* But when both are in the same block we cannot
505 easily tell whether we came from a backedge
506 unless we decide to compute stmt UIDs
507 (see PR58246). */
508 && (basic_block) data != gimple_bb (def_stmt)
509 && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
510 gimple_bb (def_stmt))
511 && operand_equal_p (ref->ref, lhs, 0))
512 return true;
513 }
514 }
515
516 /* Otherwise keep walking. */
517 return false;
518 }
519
520 static void
521 mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
522 {
523 unsigned int chain;
524 ao_ref refd;
525 gcc_assert (!chain_ovfl);
526 ao_ref_init (&refd, ref);
527 chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
528 mark_aliased_reaching_defs_necessary_1,
529 gimple_bb (stmt), NULL);
530 if (chain > longest_chain)
531 longest_chain = chain;
532 total_chain += chain;
533 nr_walks++;
534 }
535
536 /* Worker for the walker that marks reaching definitions of REF, which
537 is not based on a non-aliased decl. For simplicity we need to end
538 up marking all may-defs necessary that are not based on a non-aliased
539 decl. The only job of this walker is to skip may-defs based on
540 a non-aliased decl. */
541
542 static bool
543 mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
544 tree vdef, void *data ATTRIBUTE_UNUSED)
545 {
546 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
547
548 /* We have to skip already visited (and thus necessary) statements
549 to make the chaining work after we dropped back to simple mode. */
550 if (chain_ovfl
551 && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
552 {
553 gcc_assert (gimple_nop_p (def_stmt)
554 || gimple_plf (def_stmt, STMT_NECESSARY));
555 return false;
556 }
557
558 /* We want to skip stores to non-aliased variables. */
559 if (!chain_ovfl
560 && gimple_assign_single_p (def_stmt))
561 {
562 tree lhs = gimple_assign_lhs (def_stmt);
563 if (!ref_may_be_aliased (lhs))
564 return false;
565 }
566
567 /* We want to skip statements that do not constitute stores but have
568 a virtual definition. */
569 if (is_gimple_call (def_stmt))
570 {
571 tree callee = gimple_call_fndecl (def_stmt);
572 if (callee != NULL_TREE
573 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
574 switch (DECL_FUNCTION_CODE (callee))
575 {
576 case BUILT_IN_MALLOC:
577 case BUILT_IN_ALIGNED_ALLOC:
578 case BUILT_IN_CALLOC:
579 CASE_BUILT_IN_ALLOCA:
580 case BUILT_IN_FREE:
581 return false;
582
583 default:;
584 }
585 }
586
587 if (! gimple_clobber_p (def_stmt))
588 mark_operand_necessary (vdef);
589
590 return false;
591 }
592
593 static void
594 mark_all_reaching_defs_necessary (gimple *stmt)
595 {
596 walk_aliased_vdefs (NULL, gimple_vuse (stmt),
597 mark_all_reaching_defs_necessary_1, NULL, &visited);
598 }
599
600 /* Return true if PHI node PHI has a single argument or all of its
601    arguments are identical, in which case it can be removed.  */
602 static bool
603 degenerate_phi_p (gimple *phi)
604 {
605 unsigned int i;
606 tree op = gimple_phi_arg_def (phi, 0);
607 for (i = 1; i < gimple_phi_num_args (phi); i++)
608 if (gimple_phi_arg_def (phi, i) != op)
609 return false;
610 return true;
611 }
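/* For example (illustrative names): "x_4 = PHI <y_2(3), y_2(4)>" is
   degenerate because every argument is the same SSA name, whereas
   "x_4 = PHI <y_2(3), z_5(4)>" is not.  */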
612
613 /* Propagate necessity using the operands of necessary statements.
614 Process the uses on each statement in the worklist, and add all
615 feeding statements which contribute to the calculation of this
616 value to the worklist.
617
618    In conservative mode (AGGRESSIVE is false), control dependences are ignored.  */
619
620 static void
621 propagate_necessity (bool aggressive)
622 {
623 gimple *stmt;
624
625 if (dump_file && (dump_flags & TDF_DETAILS))
626 fprintf (dump_file, "\nProcessing worklist:\n");
627
628 while (worklist.length () > 0)
629 {
630 /* Take STMT from worklist. */
631 stmt = worklist.pop ();
632
633 if (dump_file && (dump_flags & TDF_DETAILS))
634 {
635 fprintf (dump_file, "processing: ");
636 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
637 fprintf (dump_file, "\n");
638 }
639
640 if (aggressive)
641 {
642 /* Mark the last statement of the basic blocks on which the block
643 containing STMT is control dependent, but only if we haven't
644 already done so. */
645 basic_block bb = gimple_bb (stmt);
646 if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
647 && !bitmap_bit_p (visited_control_parents, bb->index))
648 mark_control_dependent_edges_necessary (bb, false);
649 }
650
651 if (gimple_code (stmt) == GIMPLE_PHI
652 /* We do not process virtual PHI nodes nor do we track their
653 necessity. */
654 && !virtual_operand_p (gimple_phi_result (stmt)))
655 {
656 /* PHI nodes are somewhat special in that each PHI alternative has
657 data and control dependencies. All the statements feeding the
658 PHI node's arguments are always necessary. In aggressive mode,
659 we also consider the control dependent edges leading to the
660 predecessor block associated with each PHI alternative as
661 necessary. */
662 gphi *phi = as_a <gphi *> (stmt);
663 size_t k;
664
665 for (k = 0; k < gimple_phi_num_args (stmt); k++)
666 {
667 tree arg = PHI_ARG_DEF (stmt, k);
668 if (TREE_CODE (arg) == SSA_NAME)
669 mark_operand_necessary (arg);
670 }
671
672 /* For PHI operands it matters from where the control flow arrives
673 	     at the BB.  Consider the following example:
674
675 a=exp1;
676 b=exp2;
677 if (test)
678 ;
679 else
680 ;
681 c=PHI(a,b)
682
683 	     We need to mark the control dependences of the empty basic blocks, since
684 	     they contain the computation of PHI operands.
685
686 	     Doing so is too restrictive in the case the predecessor block is
687 	     inside the loop.  Consider:
688
689 if (b)
690 {
691 int i;
692 for (i = 0; i<1000; ++i)
693 ;
694 j = 0;
695 }
696 return j;
697
698 	     There is a PHI for J in the BB containing the return statement.
699 	     In this case the control dependence of the predecessor block (which
700 	     is within the empty loop) also contains the block determining the
701 	     number of loop iterations, and that would prevent the empty loop
702 	     from being removed.
703
704 	     This scenario can be avoided by splitting critical edges.
705 	     To save the critical edge splitting pass we identify what the control
706 	     dependence would look like if the edge were split.
707
708 	     Consider the modified CFG created from the current CFG by splitting
709 	     edge B->C.  In the postdominance tree of the modified CFG, C' is
710 	     always a child of C.  There are two ways the children of C' can
711 	     look:
712
713 	     1) C' is a leaf
714
715 	        In this case the only basic block C' is control dependent on is B.
716
717 	     2) C' has a single child, which is B
718
719 	        In this case the control dependence of C' is the same as the control
720 	        dependence of B in the original CFG, except for block B itself
721 	        (since C' postdominates B in the modified CFG).
722
723 	     Now how do we decide which case happens?  There are two basic options:
724
725 	     a) C postdominates B.  Then C immediately postdominates B, and
726 	        case 2 happens iff there is no other way from B to C except
727 	        the edge B->C.
728
729 	        There is another way from B to C iff there is a successor of B that
730 	        is not postdominated by B.  Testing this condition is somewhat
731 	        expensive, because we need to iterate over all successors of B.
732 	        We are safe to assume that this does not happen: we will mark B
733 	        as needed when processing the other path from B to C that is
734 	        control dependent on B, and marking the control dependencies of B
735 	        itself is harmless because they will be processed anyway after
736 	        processing the control statement in B.
737
738 	     b) C does not postdominate B.  Case 1 always happens, since there is
739 	        a path from C to the exit that does not go through B and thus also C'.  */
740
741 if (aggressive && !degenerate_phi_p (stmt))
742 {
743 for (k = 0; k < gimple_phi_num_args (stmt); k++)
744 {
745 basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;
746
747 if (gimple_bb (stmt)
748 != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
749 {
750 if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
751 mark_last_stmt_necessary (arg_bb);
752 }
753 else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
754 && !bitmap_bit_p (visited_control_parents,
755 arg_bb->index))
756 mark_control_dependent_edges_necessary (arg_bb, true);
757 }
758 }
759 }
760 else
761 {
762 /* Propagate through the operands. Examine all the USE, VUSE and
763 VDEF operands in this statement. Mark all the statements
764 which feed this statement's uses as necessary. */
765 ssa_op_iter iter;
766 tree use;
767
768 /* If this is a call to free which is directly fed by an
769 allocation function do not mark that necessary through
770 processing the argument. */
771 if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
772 {
773 tree ptr = gimple_call_arg (stmt, 0);
774 gimple *def_stmt;
775 tree def_callee;
776 /* If the pointer we free is defined by an allocation
777 function do not add the call to the worklist. */
778 if (TREE_CODE (ptr) == SSA_NAME
779 && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
780 && (def_callee = gimple_call_fndecl (def_stmt))
781 && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
782 && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
783 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
784 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
785 {
786 gimple *bounds_def_stmt;
787 tree bounds;
788
789 /* For instrumented calls we should also check used
790 bounds are returned by the same allocation call. */
791 if (!gimple_call_with_bounds_p (stmt)
792 || ((bounds = gimple_call_arg (stmt, 1))
793 && TREE_CODE (bounds) == SSA_NAME
794 && (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
795 && chkp_gimple_call_builtin_p (bounds_def_stmt,
796 BUILT_IN_CHKP_BNDRET)
797 && gimple_call_arg (bounds_def_stmt, 0) == ptr))
798 continue;
799 }
800 }
801
802 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
803 mark_operand_necessary (use);
804
805 use = gimple_vuse (stmt);
806 if (!use)
807 continue;
808
809 /* If we dropped to simple mode make all immediately
810 reachable definitions necessary. */
811 if (chain_ovfl)
812 {
813 mark_all_reaching_defs_necessary (stmt);
814 continue;
815 }
816
817 /* For statements that may load from memory (have a VUSE) we
818 have to mark all reaching (may-)definitions as necessary.
819 We partition this task into two cases:
820 1) explicit loads based on decls that are not aliased
821 2) implicit loads (like calls) and explicit loads not
822 based on decls that are not aliased (like indirect
823 references or loads from globals)
824 For 1) we mark all reaching may-defs as necessary, stopping
825 at dominating kills. For 2) we want to mark all dominating
826 	     references necessary, except for the non-aliased ones, which are
827 	     handled in 1).  By keeping a global visited bitmap for references
828 we walk for 2) we avoid quadratic behavior for those. */
829
830 if (is_gimple_call (stmt))
831 {
832 tree callee = gimple_call_fndecl (stmt);
833 unsigned i;
834
835 /* Calls to functions that are merely acting as barriers
836 or that only store to memory do not make any previous
837 stores necessary. */
838 if (callee != NULL_TREE
839 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
840 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
841 || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
842 || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
843 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
844 || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
845 || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
846 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
847 || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
848 || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
849 || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
850 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
851 continue;
852
853 /* Calls implicitly load from memory, their arguments
854 in addition may explicitly perform memory loads. */
855 mark_all_reaching_defs_necessary (stmt);
856 for (i = 0; i < gimple_call_num_args (stmt); ++i)
857 {
858 tree arg = gimple_call_arg (stmt, i);
859 if (TREE_CODE (arg) == SSA_NAME
860 || is_gimple_min_invariant (arg))
861 continue;
862 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
863 arg = TREE_OPERAND (arg, 0);
864 if (!ref_may_be_aliased (arg))
865 mark_aliased_reaching_defs_necessary (stmt, arg);
866 }
867 }
868 else if (gimple_assign_single_p (stmt))
869 {
870 tree rhs;
871 /* If this is a load mark things necessary. */
872 rhs = gimple_assign_rhs1 (stmt);
873 if (TREE_CODE (rhs) != SSA_NAME
874 && !is_gimple_min_invariant (rhs)
875 && TREE_CODE (rhs) != CONSTRUCTOR)
876 {
877 if (!ref_may_be_aliased (rhs))
878 mark_aliased_reaching_defs_necessary (stmt, rhs);
879 else
880 mark_all_reaching_defs_necessary (stmt);
881 }
882 }
883 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
884 {
885 tree rhs = gimple_return_retval (return_stmt);
886 /* A return statement may perform a load. */
887 if (rhs
888 && TREE_CODE (rhs) != SSA_NAME
889 && !is_gimple_min_invariant (rhs)
890 && TREE_CODE (rhs) != CONSTRUCTOR)
891 {
892 if (!ref_may_be_aliased (rhs))
893 mark_aliased_reaching_defs_necessary (stmt, rhs);
894 else
895 mark_all_reaching_defs_necessary (stmt);
896 }
897 }
898 else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
899 {
900 unsigned i;
901 mark_all_reaching_defs_necessary (stmt);
902 /* Inputs may perform loads. */
903 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
904 {
905 tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
906 if (TREE_CODE (op) != SSA_NAME
907 && !is_gimple_min_invariant (op)
908 && TREE_CODE (op) != CONSTRUCTOR
909 && !ref_may_be_aliased (op))
910 mark_aliased_reaching_defs_necessary (stmt, op);
911 }
912 }
913 else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
914 {
915 /* The beginning of a transaction is a memory barrier. */
916 /* ??? If we were really cool, we'd only be a barrier
917 for the memories touched within the transaction. */
918 mark_all_reaching_defs_necessary (stmt);
919 }
920 else
921 gcc_unreachable ();
922
923 /* If we over-used our alias oracle budget drop to simple
924 mode. The cost metric allows quadratic behavior
925 (number of uses times number of may-defs queries) up to
926 a constant maximal number of queries and after that falls back to
927 super-linear complexity. */
928 if (/* Constant but quadratic for small functions. */
929 total_chain > 128 * 128
930 /* Linear in the number of may-defs. */
931 && total_chain > 32 * longest_chain
932 /* Linear in the number of uses. */
933 && total_chain > nr_walks * 32)
934 {
935 chain_ovfl = true;
936 if (visited)
937 bitmap_clear (visited);
938 }
939 }
940 }
941 }
942
943 /* Remove dead PHI nodes from block BB. */
944
945 static bool
946 remove_dead_phis (basic_block bb)
947 {
948 bool something_changed = false;
949 gphi *phi;
950 gphi_iterator gsi;
951
952 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
953 {
954 stats.total_phis++;
955 phi = gsi.phi ();
956
957 /* We do not track necessity of virtual PHI nodes. Instead do
958 very simple dead PHI removal here. */
959 if (virtual_operand_p (gimple_phi_result (phi)))
960 {
961 /* Virtual PHI nodes with one or identical arguments
962 can be removed. */
963 if (degenerate_phi_p (phi))
964 {
965 tree vdef = gimple_phi_result (phi);
966 tree vuse = gimple_phi_arg_def (phi, 0);
967
968 use_operand_p use_p;
969 imm_use_iterator iter;
970 gimple *use_stmt;
971 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
972 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
973 SET_USE (use_p, vuse);
974 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
975 && TREE_CODE (vuse) == SSA_NAME)
976 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
977 }
978 else
979 gimple_set_plf (phi, STMT_NECESSARY, true);
980 }
981
982 if (!gimple_plf (phi, STMT_NECESSARY))
983 {
984 something_changed = true;
985 if (dump_file && (dump_flags & TDF_DETAILS))
986 {
987 fprintf (dump_file, "Deleting : ");
988 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
989 fprintf (dump_file, "\n");
990 }
991
992 remove_phi_node (&gsi, true);
993 stats.removed_phis++;
994 continue;
995 }
996
997 gsi_next (&gsi);
998 }
999 return something_changed;
1000 }
1001
1002
1003 /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1004 containing I so that we don't have to look it up. */
1005
1006 static void
1007 remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
1008 {
1009 gimple *stmt = gsi_stmt (*i);
1010
1011 if (dump_file && (dump_flags & TDF_DETAILS))
1012 {
1013 fprintf (dump_file, "Deleting : ");
1014 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1015 fprintf (dump_file, "\n");
1016 }
1017
1018 stats.removed++;
1019
1020 /* If we have determined that a conditional branch statement contributes
1021 nothing to the program, then we not only remove it, but we need to update
1022    the CFG.  We can choose any of the edges out of BB as long as we are sure not to
1023 close infinite loops. This is done by always choosing the edge closer to
1024 exit in inverted_post_order_compute order. */
1025 if (is_ctrl_stmt (stmt))
1026 {
1027 edge_iterator ei;
1028 edge e = NULL, e2;
1029
1030 /* See if there is only one non-abnormal edge. */
1031 if (single_succ_p (bb))
1032 e = single_succ_edge (bb);
1033       /* Otherwise choose one that is closer to a BB with a live statement in it.
1034          To be able to choose one, we compute an inverted post order starting from
1035 all BBs with live statements. */
1036 if (!e)
1037 {
1038 if (!bb_postorder)
1039 {
1040 auto_vec<int, 20> postorder;
1041 inverted_post_order_compute (&postorder,
1042 &bb_contains_live_stmts);
1043 bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
1044 for (unsigned int i = 0; i < postorder.length (); ++i)
1045 bb_postorder[postorder[i]] = i;
1046 }
1047 FOR_EACH_EDGE (e2, ei, bb->succs)
1048 if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
1049 || bb_postorder [e->dest->index]
1050 < bb_postorder [e2->dest->index])
1051 e = e2;
1052 }
1053 gcc_assert (e);
1054 e->probability = profile_probability::always ();
1055
1056 /* The edge is no longer associated with a conditional, so it does
1057 not have TRUE/FALSE flags.
1058 We are also safe to drop EH/ABNORMAL flags and turn them into
1059 normal control flow, because we know that all the destinations (including
1060 those odd edges) are equivalent for program execution. */
1061 e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);
1062
1063 /* The lone outgoing edge from BB will be a fallthru edge. */
1064 e->flags |= EDGE_FALLTHRU;
1065
1066 /* Remove the remaining outgoing edges. */
1067 for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
1068 if (e != e2)
1069 {
1070 cfg_altered = true;
1071 /* If we made a BB unconditionally exit a loop or removed
1072 an entry into an irreducible region, then this transform
1073 alters the set of BBs in the loop. Schedule a fixup. */
1074 if (loop_exit_edge_p (bb->loop_father, e)
1075 || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
1076 loops_state_set (LOOPS_NEED_FIXUP);
1077 remove_edge (e2);
1078 }
1079 else
1080 ei_next (&ei);
1081 }
1082
1083 /* If this is a store into a variable that is being optimized away,
1084 add a debug bind stmt if possible. */
1085 if (MAY_HAVE_DEBUG_BIND_STMTS
1086 && gimple_assign_single_p (stmt)
1087 && is_gimple_val (gimple_assign_rhs1 (stmt)))
1088 {
1089 tree lhs = gimple_assign_lhs (stmt);
1090 if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
1091 && !DECL_IGNORED_P (lhs)
1092 && is_gimple_reg_type (TREE_TYPE (lhs))
1093 && !is_global_var (lhs)
1094 && !DECL_HAS_VALUE_EXPR_P (lhs))
1095 {
1096 tree rhs = gimple_assign_rhs1 (stmt);
1097 gdebug *note
1098 = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1099 gsi_insert_after (i, note, GSI_SAME_STMT);
1100 }
1101 }
1102
1103 unlink_stmt_vdef (stmt);
1104 gsi_remove (i, true);
1105 release_defs (stmt);
1106 }
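/* As an example of the debug bind insertion above (hypothetical names): if
   a store "x = y_1" to a user variable x that is being optimized away is
   deleted, a "# DEBUG x => y_1" statement is inserted in its place so that
   debug information can still describe the value of x.  */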
1107
1108 /* Helper for maybe_optimize_arith_overflow.  Find whether *TP contains any
1109    use of DATA (an SSA_NAME) other than through a REALPART_EXPR.  */
1110
1111 static tree
1112 find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1113 {
1114 if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1115 *walk_subtrees = 0;
1116 if (*tp == (tree) data)
1117 return *tp;
1118 return NULL_TREE;
1119 }
1120
1121 /* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
1122 but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
1123 into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
1124 uses. */
1125
1126 static void
1127 maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
1128 enum tree_code subcode)
1129 {
1130 gimple *stmt = gsi_stmt (*gsi);
1131 tree lhs = gimple_call_lhs (stmt);
1132
1133 if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
1134 return;
1135
1136 imm_use_iterator imm_iter;
1137 use_operand_p use_p;
1138 bool has_debug_uses = false;
1139 bool has_realpart_uses = false;
1140 bool has_other_uses = false;
1141 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
1142 {
1143 gimple *use_stmt = USE_STMT (use_p);
1144 if (is_gimple_debug (use_stmt))
1145 has_debug_uses = true;
1146 else if (is_gimple_assign (use_stmt)
1147 && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
1148 && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
1149 has_realpart_uses = true;
1150 else
1151 {
1152 has_other_uses = true;
1153 break;
1154 }
1155 }
1156
1157 if (!has_realpart_uses || has_other_uses)
1158 return;
1159
1160 tree arg0 = gimple_call_arg (stmt, 0);
1161 tree arg1 = gimple_call_arg (stmt, 1);
1162 location_t loc = gimple_location (stmt);
1163 tree type = TREE_TYPE (TREE_TYPE (lhs));
1164 tree utype = type;
1165 if (!TYPE_UNSIGNED (type))
1166 utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
1167 tree result = fold_build2_loc (loc, subcode, utype,
1168 fold_convert_loc (loc, utype, arg0),
1169 fold_convert_loc (loc, utype, arg1));
1170 result = fold_convert_loc (loc, type, result);
1171
1172 if (has_debug_uses)
1173 {
1174 gimple *use_stmt;
1175 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
1176 {
1177 if (!gimple_debug_bind_p (use_stmt))
1178 continue;
1179 tree v = gimple_debug_bind_get_value (use_stmt);
1180 if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
1181 {
1182 gimple_debug_bind_reset_value (use_stmt);
1183 update_stmt (use_stmt);
1184 }
1185 }
1186 }
1187
1188 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
1189 result = drop_tree_overflow (result);
1190 tree overflow = build_zero_cst (type);
1191 tree ctype = build_complex_type (type);
1192 if (TREE_CODE (result) == INTEGER_CST)
1193 result = build_complex (ctype, result, overflow);
1194 else
1195 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
1196 ctype, result, overflow);
1197
1198 if (dump_file && (dump_flags & TDF_DETAILS))
1199 {
1200 fprintf (dump_file, "Transforming call: ");
1201 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1202 fprintf (dump_file, "because the overflow result is never used into: ");
1203 print_generic_stmt (dump_file, result, TDF_SLIM);
1204 fprintf (dump_file, "\n");
1205 }
1206
1207 if (!update_call_from_tree (gsi, result))
1208 gimplify_and_update_call_from_tree (gsi, result);
1209 }
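/* A sketch of the transform above (example made up for this comment): given

       _5 = ADD_OVERFLOW (a_1, b_2);
       _6 = REALPART_EXPR <_5>;

   with no use of IMAGPART_EXPR <_5>, the internal call is replaced by a
   plain unsigned addition wrapped in a COMPLEX_EXPR whose imaginary
   (overflow) part is the constant 0, so _6 keeps its value while the
   overflow check itself disappears.  */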
1210
1211 /* Eliminate unnecessary statements. Any instruction not marked as necessary
1212 contributes nothing to the program, and can be deleted. */
1213
1214 static bool
1215 eliminate_unnecessary_stmts (void)
1216 {
1217 bool something_changed = false;
1218 basic_block bb;
1219 gimple_stmt_iterator gsi, psi;
1220 gimple *stmt;
1221 tree call;
1222 vec<basic_block> h;
1223
1224 if (dump_file && (dump_flags & TDF_DETAILS))
1225 fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1226
1227 clear_special_calls ();
1228
1229 /* Walking basic blocks and statements in reverse order avoids
1230 releasing SSA names before any other DEFs that refer to them are
1231 released. This helps avoid loss of debug information, as we get
1232 a chance to propagate all RHSs of removed SSAs into debug uses,
1233 rather than only the latest ones. E.g., consider:
1234
1235 x_3 = y_1 + z_2;
1236 a_5 = x_3 - b_4;
1237 # DEBUG a => a_5
1238
1239 If we were to release x_3 before a_5, when we reached a_5 and
1240 tried to substitute it into the debug stmt, we'd see x_3 there,
1241 but x_3's DEF, type, etc would have already been disconnected.
1242 By going backwards, the debug stmt first changes to:
1243
1244 # DEBUG a => x_3 - b_4
1245
1246 and then to:
1247
1248 # DEBUG a => y_1 + z_2 - b_4
1249
1250 as desired. */
1251 gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1252 h = get_all_dominated_blocks (CDI_DOMINATORS,
1253 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1254
1255 while (h.length ())
1256 {
1257 bb = h.pop ();
1258
1259 /* Remove dead statements. */
1260 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1261 {
1262 stmt = gsi_stmt (gsi);
1263
1264 psi = gsi;
1265 gsi_prev (&psi);
1266
1267 stats.total++;
1268
1269 /* We can mark a call to free as not necessary if the
1270 defining statement of its argument is not necessary
1271 (and thus is getting removed). */
1272 if (gimple_plf (stmt, STMT_NECESSARY)
1273 && gimple_call_builtin_p (stmt, BUILT_IN_FREE))
1274 {
1275 tree ptr = gimple_call_arg (stmt, 0);
1276 if (TREE_CODE (ptr) == SSA_NAME)
1277 {
1278 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1279 if (!gimple_nop_p (def_stmt)
1280 && !gimple_plf (def_stmt, STMT_NECESSARY))
1281 gimple_set_plf (stmt, STMT_NECESSARY, false);
1282 }
1283 		  /* We did not propagate necessity for free calls fed
1284 		     by an allocation function, to allow elimination of
1285 		     unnecessary alloc-free sequences.  For instrumented
1286 		     calls it also means we did not mark the bounds producer
1287 		     as necessary, and it is time to do so in case the free
1288 		     call is not removed.  */
1289 if (gimple_call_with_bounds_p (stmt))
1290 {
1291 gimple *bounds_def_stmt;
1292 tree bounds = gimple_call_arg (stmt, 1);
1293 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
1294 bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
1295 if (bounds_def_stmt
1296 && !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
1297 gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
1298 gimple_plf (stmt, STMT_NECESSARY));
1299 }
1300 }
1301
1302 /* If GSI is not necessary then remove it. */
1303 if (!gimple_plf (stmt, STMT_NECESSARY))
1304 {
1305 	      /* Keep those clobbers that we can keep live.  */
1306 if (gimple_clobber_p (stmt))
1307 {
1308 ssa_op_iter iter;
1309 use_operand_p use_p;
1310 bool dead = false;
1311 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1312 {
1313 tree name = USE_FROM_PTR (use_p);
1314 if (!SSA_NAME_IS_DEFAULT_DEF (name)
1315 && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
1316 {
1317 dead = true;
1318 break;
1319 }
1320 }
1321 if (!dead)
1322 continue;
1323 }
1324 if (!is_gimple_debug (stmt))
1325 something_changed = true;
1326 remove_dead_stmt (&gsi, bb);
1327 }
1328 else if (is_gimple_call (stmt))
1329 {
1330 tree name = gimple_call_lhs (stmt);
1331
1332 notice_special_calls (as_a <gcall *> (stmt));
1333
1334 /* When LHS of var = call (); is dead, simplify it into
1335 call (); saving one operand. */
1336 if (name
1337 && TREE_CODE (name) == SSA_NAME
1338 && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1339 /* Avoid doing so for allocation calls which we
1340 did not mark as necessary, it will confuse the
1341 special logic we apply to malloc/free pair removal. */
1342 && (!(call = gimple_call_fndecl (stmt))
1343 || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
1344 || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
1345 && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
1346 && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
1347 && !ALLOCA_FUNCTION_CODE_P
1348 (DECL_FUNCTION_CODE (call))))
1349 /* Avoid doing so for bndret calls for the same reason. */
1350 && !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
1351 {
1352 something_changed = true;
1353 if (dump_file && (dump_flags & TDF_DETAILS))
1354 {
1355 fprintf (dump_file, "Deleting LHS of call: ");
1356 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1357 fprintf (dump_file, "\n");
1358 }
1359
1360 gimple_call_set_lhs (stmt, NULL_TREE);
1361 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1362 update_stmt (stmt);
1363 release_ssa_name (name);
1364
1365 /* GOMP_SIMD_LANE or ASAN_POISON without lhs is not
1366 needed. */
1367 if (gimple_call_internal_p (stmt))
1368 switch (gimple_call_internal_fn (stmt))
1369 {
1370 case IFN_GOMP_SIMD_LANE:
1371 case IFN_ASAN_POISON:
1372 remove_dead_stmt (&gsi, bb);
1373 break;
1374 default:
1375 break;
1376 }
1377 }
1378 else if (gimple_call_internal_p (stmt))
1379 switch (gimple_call_internal_fn (stmt))
1380 {
1381 case IFN_ADD_OVERFLOW:
1382 maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1383 break;
1384 case IFN_SUB_OVERFLOW:
1385 maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1386 break;
1387 case IFN_MUL_OVERFLOW:
1388 maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
1389 break;
1390 default:
1391 break;
1392 }
1393 }
1394 }
1395 }
1396
1397 h.release ();
1398
1399 /* Since we don't track liveness of virtual PHI nodes, it is possible that we
1400 rendered some PHI nodes unreachable while they are still in use.
1401 Mark them for renaming. */
1402 if (cfg_altered)
1403 {
1404 basic_block prev_bb;
1405
1406 find_unreachable_blocks ();
1407
1408 /* Delete all unreachable basic blocks in reverse dominator order. */
1409 for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1410 bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1411 {
1412 prev_bb = bb->prev_bb;
1413
1414 if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
1415 || !(bb->flags & BB_REACHABLE))
1416 {
1417 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1418 gsi_next (&gsi))
1419 if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
1420 {
1421 bool found = false;
1422 imm_use_iterator iter;
1423
1424 FOR_EACH_IMM_USE_STMT (stmt, iter,
1425 gimple_phi_result (gsi.phi ()))
1426 {
1427 if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1428 continue;
1429 if (gimple_code (stmt) == GIMPLE_PHI
1430 || gimple_plf (stmt, STMT_NECESSARY))
1431 {
1432 found = true;
1433 BREAK_FROM_IMM_USE_STMT (iter);
1434 }
1435 }
1436 if (found)
1437 mark_virtual_phi_result_for_renaming (gsi.phi ());
1438 }
1439
1440 if (!(bb->flags & BB_REACHABLE))
1441 {
1442 /* Speed up the removal of blocks that don't
1443 dominate others. Walking backwards, this should
1444 be the common case. ??? Do we need to recompute
1445 dominators because of cfg_altered? */
1446 if (!first_dom_son (CDI_DOMINATORS, bb))
1447 delete_basic_block (bb);
1448 else
1449 {
1450 h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1451
1452 while (h.length ())
1453 {
1454 bb = h.pop ();
1455 prev_bb = bb->prev_bb;
1456 /* Rearrangements to the CFG may have failed
1457 to update the dominators tree, so that
1458 formerly-dominated blocks are now
1459 otherwise reachable. */
1460 if (!!(bb->flags & BB_REACHABLE))
1461 continue;
1462 delete_basic_block (bb);
1463 }
1464
1465 h.release ();
1466 }
1467 }
1468 }
1469 }
1470 }
1471 FOR_EACH_BB_FN (bb, cfun)
1472 {
1473 /* Remove dead PHI nodes. */
1474 something_changed |= remove_dead_phis (bb);
1475 }
1476
1477 if (bb_postorder)
1478 free (bb_postorder);
1479 bb_postorder = NULL;
1480
1481 return something_changed;
1482 }
1483
1484
1485 /* Print out removed statement statistics. */
1486
1487 static void
1488 print_stats (void)
1489 {
1490 float percg;
1491
1492 percg = ((float) stats.removed / (float) stats.total) * 100;
1493 fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1494 stats.removed, stats.total, (int) percg);
1495
1496 if (stats.total_phis == 0)
1497 percg = 0;
1498 else
1499 percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1500
1501 fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1502 stats.removed_phis, stats.total_phis, (int) percg);
1503 }
1504
1505 /* Initialization for this pass. Set up the used data structures. */
1506
1507 static void
1508 tree_dce_init (bool aggressive)
1509 {
1510 memset ((void *) &stats, 0, sizeof (stats));
1511
1512 if (aggressive)
1513 {
1514 last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1515 bitmap_clear (last_stmt_necessary);
1516 bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1517 bitmap_clear (bb_contains_live_stmts);
1518 }
1519
1520 processed = sbitmap_alloc (num_ssa_names + 1);
1521 bitmap_clear (processed);
1522
1523 worklist.create (64);
1524 cfg_altered = false;
1525 }
1526
1527 /* Cleanup after this pass. */
1528
1529 static void
1530 tree_dce_done (bool aggressive)
1531 {
1532 if (aggressive)
1533 {
1534 delete cd;
1535 sbitmap_free (visited_control_parents);
1536 sbitmap_free (last_stmt_necessary);
1537 sbitmap_free (bb_contains_live_stmts);
1538 bb_contains_live_stmts = NULL;
1539 }
1540
1541 sbitmap_free (processed);
1542
1543 worklist.release ();
1544 }
1545
1546 /* Main routine to eliminate dead code.
1547
1548 AGGRESSIVE controls the aggressiveness of the algorithm.
1549 In conservative mode, we ignore control dependence and simply declare
1550 all but the most trivially dead branches necessary. This mode is fast.
1551 In aggressive mode, control dependences are taken into account, which
1552 results in more dead code elimination, but at the cost of some time.
1553
1554 FIXME: Aggressive mode before PRE doesn't work currently because
1555 the dominance info is not invalidated after DCE1. This is
1556 not an issue right now because we only run aggressive DCE
1557 as the last tree SSA pass, but keep this in mind when you
1558 start experimenting with pass ordering. */
1559
1560 static unsigned int
1561 perform_tree_ssa_dce (bool aggressive)
1562 {
1563 bool something_changed = 0;
1564
1565 calculate_dominance_info (CDI_DOMINATORS);
1566
1567 /* Preheaders are needed for SCEV to work.
1568      Simple latches and recorded exits improve the chances that a loop will
1569      be proved finite in testcases such as loop-15.c and loop-24.c.  */
1570 bool in_loop_pipeline = scev_initialized_p ();
1571 if (aggressive && ! in_loop_pipeline)
1572 {
1573 scev_initialize ();
1574 loop_optimizer_init (LOOPS_NORMAL
1575 | LOOPS_HAVE_RECORDED_EXITS);
1576 }
1577
1578 tree_dce_init (aggressive);
1579
1580 if (aggressive)
1581 {
1582 /* Compute control dependence. */
1583 calculate_dominance_info (CDI_POST_DOMINATORS);
1584 cd = new control_dependences ();
1585
1586 visited_control_parents =
1587 sbitmap_alloc (last_basic_block_for_fn (cfun));
1588 bitmap_clear (visited_control_parents);
1589
1590 mark_dfs_back_edges ();
1591 }
1592
1593 find_obviously_necessary_stmts (aggressive);
1594
1595 if (aggressive && ! in_loop_pipeline)
1596 {
1597 loop_optimizer_finalize ();
1598 scev_finalize ();
1599 }
1600
1601 longest_chain = 0;
1602 total_chain = 0;
1603 nr_walks = 0;
1604 chain_ovfl = false;
1605 visited = BITMAP_ALLOC (NULL);
1606 propagate_necessity (aggressive);
1607 BITMAP_FREE (visited);
1608
1609 something_changed |= eliminate_unnecessary_stmts ();
1610 something_changed |= cfg_altered;
1611
1612 /* We do not update postdominators, so free them unconditionally. */
1613 free_dominance_info (CDI_POST_DOMINATORS);
1614
1615 /* If we removed paths in the CFG, then we need to update
1616 dominators as well. I haven't investigated the possibility
1617 of incrementally updating dominators. */
1618 if (cfg_altered)
1619 free_dominance_info (CDI_DOMINATORS);
1620
1621 statistics_counter_event (cfun, "Statements deleted", stats.removed);
1622 statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
1623
1624 /* Debugging dumps. */
1625 if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
1626 print_stats ();
1627
1628 tree_dce_done (aggressive);
1629
1630 if (something_changed)
1631 {
1632 free_numbers_of_iterations_estimates (cfun);
1633 if (in_loop_pipeline)
1634 scev_reset ();
1635 return TODO_update_ssa | TODO_cleanup_cfg;
1636 }
1637 return 0;
1638 }
1639
1640 /* Pass entry points. */
1641 static unsigned int
1642 tree_ssa_dce (void)
1643 {
1644 return perform_tree_ssa_dce (/*aggressive=*/false);
1645 }
1646
1647 static unsigned int
1648 tree_ssa_cd_dce (void)
1649 {
1650 return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
1651 }
1652
1653 namespace {
1654
1655 const pass_data pass_data_dce =
1656 {
1657 GIMPLE_PASS, /* type */
1658 "dce", /* name */
1659 OPTGROUP_NONE, /* optinfo_flags */
1660 TV_TREE_DCE, /* tv_id */
1661 ( PROP_cfg | PROP_ssa ), /* properties_required */
1662 0, /* properties_provided */
1663 0, /* properties_destroyed */
1664 0, /* todo_flags_start */
1665 0, /* todo_flags_finish */
1666 };
1667
1668 class pass_dce : public gimple_opt_pass
1669 {
1670 public:
1671 pass_dce (gcc::context *ctxt)
1672 : gimple_opt_pass (pass_data_dce, ctxt)
1673 {}
1674
1675 /* opt_pass methods: */
1676 opt_pass * clone () { return new pass_dce (m_ctxt); }
1677 virtual bool gate (function *) { return flag_tree_dce != 0; }
1678 virtual unsigned int execute (function *) { return tree_ssa_dce (); }
1679
1680 }; // class pass_dce
1681
1682 } // anon namespace
1683
1684 gimple_opt_pass *
1685 make_pass_dce (gcc::context *ctxt)
1686 {
1687 return new pass_dce (ctxt);
1688 }
1689
1690 namespace {
1691
1692 const pass_data pass_data_cd_dce =
1693 {
1694 GIMPLE_PASS, /* type */
1695 "cddce", /* name */
1696 OPTGROUP_NONE, /* optinfo_flags */
1697 TV_TREE_CD_DCE, /* tv_id */
1698 ( PROP_cfg | PROP_ssa ), /* properties_required */
1699 0, /* properties_provided */
1700 0, /* properties_destroyed */
1701 0, /* todo_flags_start */
1702 0, /* todo_flags_finish */
1703 };
1704
1705 class pass_cd_dce : public gimple_opt_pass
1706 {
1707 public:
1708 pass_cd_dce (gcc::context *ctxt)
1709 : gimple_opt_pass (pass_data_cd_dce, ctxt)
1710 {}
1711
1712 /* opt_pass methods: */
1713 opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
1714 virtual bool gate (function *) { return flag_tree_dce != 0; }
1715 virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }
1716
1717 }; // class pass_cd_dce
1718
1719 } // anon namespace
1720
1721 gimple_opt_pass *
1722 make_pass_cd_dce (gcc::context *ctxt)
1723 {
1724 return new pass_cd_dce (ctxt);
1725 }
1726
1727
1728 /* A cheap DCE interface. WORKLIST is a list of possibly dead stmts and
1729 is consumed by this function. The function has linear complexity in
1730    the number of dead stmts, with a constant factor proportional to the
1731    average number of SSA use operands per statement.  */
1732
1733 void
1734 simple_dce_from_worklist (bitmap worklist)
1735 {
1736 while (! bitmap_empty_p (worklist))
1737 {
1738 /* Pop item. */
1739 unsigned i = bitmap_first_set_bit (worklist);
1740 bitmap_clear_bit (worklist, i);
1741
1742 tree def = ssa_name (i);
1743 /* Removed by somebody else or still in use. */
1744 if (! def || ! has_zero_uses (def))
1745 continue;
1746
1747 gimple *t = SSA_NAME_DEF_STMT (def);
1748 if (gimple_has_side_effects (t))
1749 continue;
1750
1751 /* Add uses to the worklist. */
1752 ssa_op_iter iter;
1753 use_operand_p use_p;
1754 FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
1755 {
1756 tree use = USE_FROM_PTR (use_p);
1757 if (TREE_CODE (use) == SSA_NAME
1758 && ! SSA_NAME_IS_DEFAULT_DEF (use))
1759 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
1760 }
1761
1762 /* Remove stmt. */
1763 if (dump_file && (dump_flags & TDF_DETAILS))
1764 {
1765 fprintf (dump_file, "Removing dead stmt:");
1766 print_gimple_stmt (dump_file, t, 0);
1767 }
1768 gimple_stmt_iterator gsi = gsi_for_stmt (t);
1769 if (gimple_code (t) == GIMPLE_PHI)
1770 remove_phi_node (&gsi, true);
1771 else
1772 {
1773 gsi_remove (&gsi, true);
1774 release_defs (t);
1775 }
1776 }
1777 }
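/* A minimal usage sketch for the routine above (hypothetical caller, not an
   existing pass): collect the SSA versions of definitions that may have
   become dead and let simple_dce_from_worklist clean them up:

     bitmap maybe_dead = BITMAP_ALLOC (NULL);
     bitmap_set_bit (maybe_dead, SSA_NAME_VERSION (old_lhs));
     ...
     simple_dce_from_worklist (maybe_dead);
     BITMAP_FREE (maybe_dead);

   Only names that really have zero uses and whose defining statements have
   no side effects are removed; everything else is left untouched.  */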