1 /* Dead code elimination pass for the GNU compiler.
2 Copyright (C) 2002-2021 Free Software Foundation, Inc.
3 Contributed by Ben Elliston <bje@redhat.com>
4 and Andrew MacLeod <amacleod@redhat.com>
5 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* Dead code elimination.
24
25 References:
26
27 Building an Optimizing Compiler,
28 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
29
30 Advanced Compiler Design and Implementation,
31 Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
32
33 Dead-code elimination is the removal of statements which have no
34 impact on the program's output. "Dead statements" have no impact
35 on the program's output, while "necessary statements" may have
36 an impact on the output.
37
38 The algorithm consists of three phases:
39 1. Marking as necessary all statements known to be necessary,
40 e.g. most function calls, writing a value to memory, etc;
41 2. Propagating necessary statements, e.g., the statements
42 giving values to operands in necessary statements; and
43 3. Removing dead statements. */
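/* Illustrative sketch (not from the original sources): given GIMPLE like

     a_1 = b_2 + 1;     (feeds only c_3)
     c_3 = a_1 * 2;     (result never used)
     return b_2;

   phase 1 marks the return statement as necessary, phase 2 finds that
   only b_2 feeds it, and phase 3 deletes the now-dead definitions of
   a_1 and c_3.  */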
44
45 #include "config.h"
46 #include "system.h"
47 #include "coretypes.h"
48 #include "backend.h"
49 #include "rtl.h"
50 #include "tree.h"
51 #include "gimple.h"
52 #include "cfghooks.h"
53 #include "tree-pass.h"
54 #include "ssa.h"
55 #include "gimple-pretty-print.h"
56 #include "fold-const.h"
57 #include "calls.h"
58 #include "cfganal.h"
59 #include "tree-eh.h"
60 #include "gimplify.h"
61 #include "gimple-iterator.h"
62 #include "tree-cfg.h"
63 #include "tree-ssa-loop-niter.h"
64 #include "tree-into-ssa.h"
65 #include "tree-dfa.h"
66 #include "cfgloop.h"
67 #include "tree-scalar-evolution.h"
68 #include "tree-ssa-propagate.h"
69 #include "gimple-fold.h"
70 #include "tree-ssa.h"
71
72 static struct stmt_stats
73 {
74 int total;
75 int total_phis;
76 int removed;
77 int removed_phis;
78 } stats;
79
80 #define STMT_NECESSARY GF_PLF_1
81
82 static vec<gimple *> worklist;
83
84 /* Vector indicating an SSA name has already been processed and marked
85 as necessary. */
86 static sbitmap processed;
87
88 /* Vector indicating that the last statement of a basic block has already
89 been marked as necessary. */
90 static sbitmap last_stmt_necessary;
91
92 /* Vector indicating that BB contains statements that are live. */
93 static sbitmap bb_contains_live_stmts;
94
95 /* Before we can determine whether a control branch is dead, we need to
96 compute which blocks are control dependent on which edges.
97
98 We expect each block to be control dependent on very few edges so we
99 use a bitmap for each block recording its edges. An array holds the
100 bitmap. The Ith bit in the bitmap is set if that block is dependent
101 on the Ith edge. */
102 static control_dependences *cd;
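/* For example (an illustrative CFG, not taken from any testcase):

     B2:  if (p_1) goto B3; else goto B4;
     B3:  x_2 = f ();
     B4:  ...

   B3 postdominates the destination of the edge B2->B3 but does not
   postdominate B2, so B3 is control dependent on that edge, while B4
   postdominates B2 and is therefore control dependent on neither
   outgoing edge of B2.  */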
103
104 /* Vector indicating that a basic block has already had all the edges
105 processed that it is control dependent on. */
106 static sbitmap visited_control_parents;
107
108 /* TRUE if this pass alters the CFG (by removing control statements).
109 FALSE otherwise.
110
111 If this pass alters the CFG, then it will arrange for the dominators
112 to be recomputed. */
113 static bool cfg_altered;
114
115 /* When non-NULL holds map from basic block index into the postorder. */
116 static int *bb_postorder;
117
118
119 /* True if we should treat any stmt with a vdef as necessary. */
120
121 static inline bool
122 keep_all_vdefs_p ()
123 {
124 return optimize_debug;
125 }
126
127 /* If STMT is not already marked necessary, mark it, and add it to the
128 worklist if ADD_TO_WORKLIST is true. */
129
130 static inline void
131 mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
132 {
133 gcc_assert (stmt);
134
135 if (gimple_plf (stmt, STMT_NECESSARY))
136 return;
137
138 if (dump_file && (dump_flags & TDF_DETAILS))
139 {
140 fprintf (dump_file, "Marking useful stmt: ");
141 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
142 fprintf (dump_file, "\n");
143 }
144
145 gimple_set_plf (stmt, STMT_NECESSARY, true);
146 if (add_to_worklist)
147 worklist.safe_push (stmt);
148 if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
149 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
150 }
151
152
153 /* Mark the statement defining operand OP as necessary. */
154
155 static inline void
156 mark_operand_necessary (tree op)
157 {
158 gimple *stmt;
159 int ver;
160
161 gcc_assert (op);
162
163 ver = SSA_NAME_VERSION (op);
164 if (bitmap_bit_p (processed, ver))
165 {
166 stmt = SSA_NAME_DEF_STMT (op);
167 gcc_assert (gimple_nop_p (stmt)
168 || gimple_plf (stmt, STMT_NECESSARY));
169 return;
170 }
171 bitmap_set_bit (processed, ver);
172
173 stmt = SSA_NAME_DEF_STMT (op);
174 gcc_assert (stmt);
175
176 if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
177 return;
178
179 if (dump_file && (dump_flags & TDF_DETAILS))
180 {
181 fprintf (dump_file, "marking necessary through ");
182 print_generic_expr (dump_file, op);
183 fprintf (dump_file, " stmt ");
184 print_gimple_stmt (dump_file, stmt, 0);
185 }
186
187 gimple_set_plf (stmt, STMT_NECESSARY, true);
188 if (bb_contains_live_stmts)
189 bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
190 worklist.safe_push (stmt);
191 }
192
193
194 /* Mark STMT as necessary if it obviously is. Add it to the worklist if
195 it can make other statements necessary.
196
197 If AGGRESSIVE is false, control statements are conservatively marked as
198 necessary. */
199
200 static void
201 mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
202 {
203 /* Statements that are implicitly live. Most function calls, asm
204 and return statements are required. Labels and GIMPLE_BIND nodes
205 are kept because they are control flow, and we have no way of
206 knowing whether they can be removed. DCE can eliminate all the
207 other statements in a block, and CFG can then remove the block
208 and labels. */
209 switch (gimple_code (stmt))
210 {
211 case GIMPLE_PREDICT:
212 case GIMPLE_LABEL:
213 mark_stmt_necessary (stmt, false);
214 return;
215
216 case GIMPLE_ASM:
217 case GIMPLE_RESX:
218 case GIMPLE_RETURN:
219 mark_stmt_necessary (stmt, true);
220 return;
221
222 case GIMPLE_CALL:
223 {
224 tree callee = gimple_call_fndecl (stmt);
225 if (callee != NULL_TREE
226 && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
227 switch (DECL_FUNCTION_CODE (callee))
228 {
229 case BUILT_IN_MALLOC:
230 case BUILT_IN_ALIGNED_ALLOC:
231 case BUILT_IN_CALLOC:
232 CASE_BUILT_IN_ALLOCA:
233 case BUILT_IN_STRDUP:
234 case BUILT_IN_STRNDUP:
235 case BUILT_IN_GOMP_ALLOC:
236 return;
237
238 default:;
239 }
240
241 if (callee != NULL_TREE
242 && flag_allocation_dce
243 && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
244 return;
245
246 /* IFN_GOACC_LOOP calls are necessary in that they are used to
247 represent the parameters (i.e. step, bound) of a lowered OpenACC
248 partitioned loop. But this kind of partitioned loop might not
249 survive aggressive loop removal because it has a loop exit and
250 is assumed to be finite. Therefore, we need to explicitly mark
251 these calls. (An example is libgomp.oacc-c-c++-common/pr84955.c) */
252 if (gimple_call_internal_p (stmt, IFN_GOACC_LOOP))
253 {
254 mark_stmt_necessary (stmt, true);
255 return;
256 }
257 break;
258 }
259
260 case GIMPLE_DEBUG:
261 /* Debug temps without a value are not useful. ??? If we could
262 easily locate the debug temp bind stmt for a use thereof,
263 we could refrain from marking all debug temps here, and
264 mark them only if they're used. */
265 if (gimple_debug_nonbind_marker_p (stmt)
266 || !gimple_debug_bind_p (stmt)
267 || gimple_debug_bind_has_value_p (stmt)
268 || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
269 mark_stmt_necessary (stmt, false);
270 return;
271
272 case GIMPLE_GOTO:
273 gcc_assert (!simple_goto_p (stmt));
274 mark_stmt_necessary (stmt, true);
275 return;
276
277 case GIMPLE_COND:
278 gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
279 /* Fall through. */
280
281 case GIMPLE_SWITCH:
282 if (! aggressive)
283 mark_stmt_necessary (stmt, true);
284 break;
285
286 case GIMPLE_ASSIGN:
287 if (gimple_clobber_p (stmt))
288 return;
289 break;
290
291 default:
292 break;
293 }
294
295 /* If the statement has volatile operands, it needs to be preserved.
296 Same for statements that can alter control flow in unpredictable
297 ways. */
298 if (gimple_has_side_effects (stmt) || is_ctrl_altering_stmt (stmt))
299 {
300 mark_stmt_necessary (stmt, true);
301 return;
302 }
303
304 /* If a statement could throw, it can be deemed necessary unless we
305 are allowed to remove dead EH. Test this after checking for
306 new/delete operators since we always elide their EH. */
307 if (!cfun->can_delete_dead_exceptions
308 && stmt_could_throw_p (cfun, stmt))
309 {
310 mark_stmt_necessary (stmt, true);
311 return;
312 }
313
314 if ((gimple_vdef (stmt) && keep_all_vdefs_p ())
315 || stmt_may_clobber_global_p (stmt))
316 {
317 mark_stmt_necessary (stmt, true);
318 return;
319 }
320
321 return;
322 }
323
324
325 /* Mark the last statement of BB as necessary. */
326
327 static void
328 mark_last_stmt_necessary (basic_block bb)
329 {
330 gimple *stmt = last_stmt (bb);
331
332 bitmap_set_bit (last_stmt_necessary, bb->index);
333 bitmap_set_bit (bb_contains_live_stmts, bb->index);
334
335 /* We actually mark the statement only if it is a control statement. */
336 if (stmt && is_ctrl_stmt (stmt))
337 mark_stmt_necessary (stmt, true);
338 }
339
340
341 /* Mark control dependent edges of BB as necessary. We have to do this only
342 once for each basic block so we set the appropriate bit after we're done.
343
344 When IGNORE_SELF is true, ignore BB in the list of control dependences. */
345
346 static void
347 mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
348 {
349 bitmap_iterator bi;
350 unsigned edge_number;
351 bool skipped = false;
352
353 gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
354
355 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
356 return;
357
358 EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
359 0, edge_number, bi)
360 {
361 basic_block cd_bb = cd->get_edge_src (edge_number);
362
363 if (ignore_self && cd_bb == bb)
364 {
365 skipped = true;
366 continue;
367 }
368
369 if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
370 mark_last_stmt_necessary (cd_bb);
371 }
372
373 if (!skipped)
374 bitmap_set_bit (visited_control_parents, bb->index);
375 }
376
377
378 /* Find obviously necessary statements. These are things like most function
379 calls, and stores to file level variables.
380
381 If AGGRESSIVE is false, control statements are conservatively marked as
382 necessary. Otherwise only the conditions needed to preserve possibly
383 infinite or irreducible loops are marked here.
384
385 static void
386 find_obviously_necessary_stmts (bool aggressive)
387 {
388 basic_block bb;
389 gimple_stmt_iterator gsi;
390 edge e;
391 gimple *phi, *stmt;
392 int flags;
393
394 FOR_EACH_BB_FN (bb, cfun)
395 {
396 /* PHI nodes are never inherently necessary. */
397 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
398 {
399 phi = gsi_stmt (gsi);
400 gimple_set_plf (phi, STMT_NECESSARY, false);
401 }
402
403 /* Check all statements in the block. */
404 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
405 {
406 stmt = gsi_stmt (gsi);
407 gimple_set_plf (stmt, STMT_NECESSARY, false);
408 mark_stmt_if_obviously_necessary (stmt, aggressive);
409 }
410 }
411
412 /* Pure and const functions are finite and thus have no infinite loops in
413 them. */
414 flags = flags_from_decl_or_type (current_function_decl);
415 if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
416 return;
417
418 /* Prevent the empty possibly infinite loops from being removed. This is
419 needed to make the logic in remove_dead_stmt work to identify the
420 correct edge to keep when removing a controlling condition. */
421 if (aggressive)
422 {
423 if (mark_irreducible_loops ())
424 FOR_EACH_BB_FN (bb, cfun)
425 {
426 edge_iterator ei;
427 FOR_EACH_EDGE (e, ei, bb->succs)
428 if ((e->flags & EDGE_DFS_BACK)
429 && (e->flags & EDGE_IRREDUCIBLE_LOOP))
430 {
431 if (dump_file)
432 fprintf (dump_file, "Marking back edge of irreducible "
433 "loop %i->%i\n", e->src->index, e->dest->index);
434 mark_control_dependent_edges_necessary (e->dest, false);
435 }
436 }
437
438 for (auto loop : loops_list (cfun, 0))
439 /* For loops without an exit do not mark any condition. */
440 if (loop->exits->next->e && !finite_loop_p (loop))
441 {
442 if (dump_file)
443 fprintf (dump_file, "cannot prove finiteness of loop %i\n",
444 loop->num);
445 mark_control_dependent_edges_necessary (loop->latch, false);
446 }
447 }
448 }
449
450
451 /* Return true if REF is based on an aliased base, otherwise false. */
452
453 static bool
454 ref_may_be_aliased (tree ref)
455 {
456 gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
457 while (handled_component_p (ref))
458 ref = TREE_OPERAND (ref, 0);
459 if ((TREE_CODE (ref) == MEM_REF || TREE_CODE (ref) == TARGET_MEM_REF)
460 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
461 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
462 return !(DECL_P (ref)
463 && !may_be_aliased (ref));
464 }
465
466 static bitmap visited = NULL;
467 static unsigned int longest_chain = 0;
468 static unsigned int total_chain = 0;
469 static unsigned int nr_walks = 0;
470 static bool chain_ovfl = false;
471
472 /* Worker for the walker that marks reaching definitions of REF,
473 which is based on a non-aliased decl, necessary. It returns
474 true whenever the defining statement of the current VDEF is
475 a kill for REF, as no dominating may-defs are necessary for REF
476 anymore. DATA points to the basic-block that contains the
477 stmt that refers to REF. */
478
479 static bool
480 mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
481 {
482 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
483
484 /* All stmts we visit are necessary. */
485 if (! gimple_clobber_p (def_stmt))
486 mark_operand_necessary (vdef);
487
488 /* If the stmt lhs kills ref, then we can stop walking. */
489 if (gimple_has_lhs (def_stmt)
490 && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
491 /* The assignment is not necessarily carried out if it can throw
492 and we can catch it in the current function where we could inspect
493 the previous value.
494 ??? We only need to care about the RHS throwing. For aggregate
495 assignments or similar calls and non-call exceptions the LHS
496 might throw as well. */
497 && !stmt_can_throw_internal (cfun, def_stmt))
498 {
499 tree base, lhs = gimple_get_lhs (def_stmt);
500 poly_int64 size, offset, max_size;
501 bool reverse;
502 ao_ref_base (ref);
503 base
504 = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
505 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
506 so base == refd->base does not always hold. */
507 if (base == ref->base)
508 {
509 /* For a must-alias check we need to be able to constrain
510 the accesses properly. */
511 if (known_eq (size, max_size)
512 && known_subrange_p (ref->offset, ref->max_size, offset, size))
513 return true;
514 /* Or they need to be exactly the same. */
515 else if (ref->ref
516 /* Make sure there is no induction variable involved
517 in the references (gcc.c-torture/execute/pr42142.c).
518 The simplest way is to check if the kill dominates
519 the use. */
520 /* But when both are in the same block we cannot
521 easily tell whether we came from a backedge
522 unless we decide to compute stmt UIDs
523 (see PR58246). */
524 && (basic_block) data != gimple_bb (def_stmt)
525 && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
526 gimple_bb (def_stmt))
527 && operand_equal_p (ref->ref, lhs, 0))
528 return true;
529 }
530 }
531
532 /* Otherwise keep walking. */
533 return false;
534 }
535
536 static void
537 mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
538 {
539 /* Should have been caught before calling this function. */
540 gcc_checking_assert (!keep_all_vdefs_p ());
541
542 unsigned int chain;
543 ao_ref refd;
544 gcc_assert (!chain_ovfl);
545 ao_ref_init (&refd, ref);
546 chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
547 mark_aliased_reaching_defs_necessary_1,
548 gimple_bb (stmt), NULL);
549 if (chain > longest_chain)
550 longest_chain = chain;
551 total_chain += chain;
552 nr_walks++;
553 }
554
555 /* Worker for the walker that marks reaching definitions of REF, which
556 is not based on a non-aliased decl. For simplicity we need to end
557 up marking all may-defs necessary that are not based on a non-aliased
558 decl. The only job of this walker is to skip may-defs based on
559 a non-aliased decl. */
560
561 static bool
562 mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
563 tree vdef, void *data ATTRIBUTE_UNUSED)
564 {
565 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
566
567 /* We have to skip already visited (and thus necessary) statements
568 to make the chaining work after we dropped back to simple mode. */
569 if (chain_ovfl
570 && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
571 {
572 gcc_assert (gimple_nop_p (def_stmt)
573 || gimple_plf (def_stmt, STMT_NECESSARY));
574 return false;
575 }
576
577 /* We want to skip stores to non-aliased variables. */
578 if (!chain_ovfl
579 && gimple_assign_single_p (def_stmt))
580 {
581 tree lhs = gimple_assign_lhs (def_stmt);
582 if (!ref_may_be_aliased (lhs))
583 return false;
584 }
585
586 /* We want to skip statements that do not constitute stores but have
587 a virtual definition. */
588 if (gcall *call = dyn_cast <gcall *> (def_stmt))
589 {
590 tree callee = gimple_call_fndecl (call);
591 if (callee != NULL_TREE
592 && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
593 switch (DECL_FUNCTION_CODE (callee))
594 {
595 case BUILT_IN_MALLOC:
596 case BUILT_IN_ALIGNED_ALLOC:
597 case BUILT_IN_CALLOC:
598 CASE_BUILT_IN_ALLOCA:
599 case BUILT_IN_FREE:
600 case BUILT_IN_GOMP_ALLOC:
601 case BUILT_IN_GOMP_FREE:
602 return false;
603
604 default:;
605 }
606
607 if (callee != NULL_TREE
608 && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
609 || DECL_IS_OPERATOR_DELETE_P (callee))
610 && gimple_call_from_new_or_delete (call))
611 return false;
612 }
613
614 if (! gimple_clobber_p (def_stmt))
615 mark_operand_necessary (vdef);
616
617 return false;
618 }
619
620 static void
621 mark_all_reaching_defs_necessary (gimple *stmt)
622 {
623 /* Should have been caught before calling this function. */
624 gcc_checking_assert (!keep_all_vdefs_p ());
625 walk_aliased_vdefs (NULL, gimple_vuse (stmt),
626 mark_all_reaching_defs_necessary_1, NULL, &visited);
627 }
628
629 /* Return true for PHI nodes with one argument or with identical
630 arguments, which can therefore be removed. */
631 static bool
632 degenerate_phi_p (gimple *phi)
633 {
634 unsigned int i;
635 tree op = gimple_phi_arg_def (phi, 0);
636 for (i = 1; i < gimple_phi_num_args (phi); i++)
637 if (gimple_phi_arg_def (phi, i) != op)
638 return false;
639 return true;
640 }
641
642 /* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
643 and delete operators. */
644
645 static bool
646 valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
647 {
648 tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
649 tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
650 return valid_new_delete_pair_p (new_asm, delete_asm);
651 }
652
653 /* Propagate necessity using the operands of necessary statements.
654 Process the uses on each statement in the worklist, and add all
655 feeding statements which contribute to the calculation of this
656 value to the worklist.
657
658 In conservative mode, AGGRESSIVE is false and control dependences are not used. */
659
660 static void
661 propagate_necessity (bool aggressive)
662 {
663 gimple *stmt;
664
665 if (dump_file && (dump_flags & TDF_DETAILS))
666 fprintf (dump_file, "\nProcessing worklist:\n");
667
668 while (worklist.length () > 0)
669 {
670 /* Take STMT from worklist. */
671 stmt = worklist.pop ();
672
673 if (dump_file && (dump_flags & TDF_DETAILS))
674 {
675 fprintf (dump_file, "processing: ");
676 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
677 fprintf (dump_file, "\n");
678 }
679
680 if (aggressive)
681 {
682 /* Mark the last statement of the basic blocks on which the block
683 containing STMT is control dependent, but only if we haven't
684 already done so. */
685 basic_block bb = gimple_bb (stmt);
686 if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
687 && !bitmap_bit_p (visited_control_parents, bb->index))
688 mark_control_dependent_edges_necessary (bb, false);
689 }
690
691 if (gimple_code (stmt) == GIMPLE_PHI
692 /* We do not process virtual PHI nodes nor do we track their
693 necessity. */
694 && !virtual_operand_p (gimple_phi_result (stmt)))
695 {
696 /* PHI nodes are somewhat special in that each PHI alternative has
697 data and control dependencies. All the statements feeding the
698 PHI node's arguments are always necessary. In aggressive mode,
699 we also consider the control dependent edges leading to the
700 predecessor block associated with each PHI alternative as
701 necessary. */
702 gphi *phi = as_a <gphi *> (stmt);
703 size_t k;
704
705 for (k = 0; k < gimple_phi_num_args (stmt); k++)
706 {
707 tree arg = PHI_ARG_DEF (stmt, k);
708 if (TREE_CODE (arg) == SSA_NAME)
709 mark_operand_necessary (arg);
710 }
711
712 /* For PHI operands it matters from which edge the control flow
713 arrives at the BB. Consider the following example:
714
715 a=exp1;
716 b=exp2;
717 if (test)
718 ;
719 else
720 ;
721 c=PHI(a,b)
722
723 We need to mark the control dependences of the empty basic blocks, since
724 they contain the computation of the PHI operands.
725
726 Doing so is too restrictive when the predecessor block is inside
727 a loop. Consider:
728
729 if (b)
730 {
731 int i;
732 for (i = 0; i<1000; ++i)
733 ;
734 j = 0;
735 }
736 return j;
737
738 There is a PHI for J in the BB containing the return statement.
739 In this case the control dependence of the predecessor block (which is
740 within the empty loop) also contains the block determining the number
741 of iterations of the loop, which would prevent the empty loop from
742 being removed in this case.
743
744 This scenario can be avoided by splitting critical edges.
745 To save the critical edge splitting pass we identify what the control
746 dependence would look like if the edge were split.
747
748 Consider the modified CFG created from the current CFG by splitting
749 edge B->C. In the postdominance tree of the modified CFG, C' is
750 always a child of C. There are two cases for what the children of C'
751 can look like:
752
753 1) C' is a leaf
754
755 In this case the only basic block C' is control dependent on is B.
756
757 2) C' has a single child, which is B
758
759 In this case the control dependence of C' is the same as the control
760 dependence of B in the original CFG except for block B itself
761 (since C' postdominates B in the modified CFG).
762
763 Now how do we decide which case happens? There are two basic options:
764
765 a) C postdominates B. Then C immediately postdominates B and
766 case 2 happens iff there is no other way from B to C except
767 the edge B->C.
768
769 There is another way from B to C iff there is a successor of B that
770 is not postdominated by B. Testing this condition is somewhat
771 expensive, because we need to iterate over all successors of B.
772 We are safe to assume that this does not happen: we will mark B
773 as needed when processing the other path from B to C that is
774 control dependent on B, and marking the control dependencies of B
775 itself is harmless because they will be processed anyway after
776 processing the control statement in B.
777
778 b) C does not postdominate B. Case 1 always happens since there is
779 a path from C to exit that does not go through B and thus not through C' either. */
780
781 if (aggressive && !degenerate_phi_p (stmt))
782 {
783 for (k = 0; k < gimple_phi_num_args (stmt); k++)
784 {
785 basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;
786
787 if (gimple_bb (stmt)
788 != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
789 {
790 if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
791 mark_last_stmt_necessary (arg_bb);
792 }
793 else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
794 && !bitmap_bit_p (visited_control_parents,
795 arg_bb->index))
796 mark_control_dependent_edges_necessary (arg_bb, true);
797 }
798 }
799 }
800 else
801 {
802 /* Propagate through the operands. Examine all the USE, VUSE and
803 VDEF operands in this statement. Mark all the statements
804 which feed this statement's uses as necessary. */
805 ssa_op_iter iter;
806 tree use;
807
808 /* If this is a call to free which is directly fed by an
809 allocation function do not mark that necessary through
810 processing the argument. */
811 bool is_delete_operator
812 = (is_gimple_call (stmt)
813 && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
814 && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
815 if (is_delete_operator
816 || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
817 || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
818 {
819 tree ptr = gimple_call_arg (stmt, 0);
820 gcall *def_stmt;
821 tree def_callee;
822 /* If the pointer we free is defined by an allocation
823 function do not add the call to the worklist. */
824 if (TREE_CODE (ptr) == SSA_NAME
825 && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
826 && (def_callee = gimple_call_fndecl (def_stmt))
827 && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
828 && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
829 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
830 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC
831 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_GOMP_ALLOC))
832 || (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)
833 && gimple_call_from_new_or_delete (def_stmt))))
834 {
835 if (is_delete_operator
836 && !valid_new_delete_pair_p (def_stmt, stmt))
837 mark_operand_necessary (gimple_call_arg (stmt, 0));
838
839 /* Delete operators can have alignment and (or) size
840 as further arguments. When those are SSA_NAMEs, they
841 must be marked as necessary. Similarly GOMP_free. */
842 if (gimple_call_num_args (stmt) >= 2)
843 for (unsigned i = 1; i < gimple_call_num_args (stmt);
844 i++)
845 {
846 tree arg = gimple_call_arg (stmt, i);
847 if (TREE_CODE (arg) == SSA_NAME)
848 mark_operand_necessary (arg);
849 }
850
851 continue;
852 }
853 }
854
855 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
856 mark_operand_necessary (use);
857
858 use = gimple_vuse (stmt);
859 if (!use)
860 continue;
861
862 /* No need to search for vdefs if we intrinsically keep them all. */
863 if (keep_all_vdefs_p ())
864 continue;
865
866 /* If we dropped to simple mode make all immediately
867 reachable definitions necessary. */
868 if (chain_ovfl)
869 {
870 mark_all_reaching_defs_necessary (stmt);
871 continue;
872 }
873
874 /* For statements that may load from memory (have a VUSE) we
875 have to mark all reaching (may-)definitions as necessary.
876 We partition this task into two cases:
877 1) explicit loads based on decls that are not aliased
878 2) implicit loads (like calls) and explicit loads not
879 based on decls that are not aliased (like indirect
880 references or loads from globals)
881 For 1) we mark all reaching may-defs as necessary, stopping
882 at dominating kills. For 2) we want to mark all dominating
883 references necessary, except non-aliased ones, which we handle
884 in 1). By keeping a global visited bitmap for references
885 we walk for 2) we avoid quadratic behavior for those. */
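/* As an illustration (hypothetical GIMPLE): a load "tem_2 = local.f;"
   from a non-aliased automatic LOCAL falls under 1), so the walk can
   stop at a dominating kill "local.f = ...;", whereas "tem_3 = *p_1;"
   or a call that may read memory falls under 2) and marks every
   reaching may-def that is not a store to a non-aliased decl.  */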
886
887 if (gcall *call = dyn_cast <gcall *> (stmt))
888 {
889 tree callee = gimple_call_fndecl (call);
890 unsigned i;
891
892 /* Calls to functions that are merely acting as barriers
893 or that only store to memory do not make any previous
894 stores necessary. */
895 if (callee != NULL_TREE
896 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
897 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
898 || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
899 || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
900 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
901 || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
902 || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
903 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
904 || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
905 || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
906 || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
907 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
908 continue;
909
910 if (callee != NULL_TREE
911 && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
912 || DECL_IS_OPERATOR_DELETE_P (callee))
913 && gimple_call_from_new_or_delete (call))
914 continue;
915
916 /* Calls implicitly load from memory; in addition their arguments
917 may explicitly perform memory loads. */
918 mark_all_reaching_defs_necessary (call);
919 for (i = 0; i < gimple_call_num_args (call); ++i)
920 {
921 tree arg = gimple_call_arg (call, i);
922 if (TREE_CODE (arg) == SSA_NAME
923 || is_gimple_min_invariant (arg))
924 continue;
925 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
926 arg = TREE_OPERAND (arg, 0);
927 if (!ref_may_be_aliased (arg))
928 mark_aliased_reaching_defs_necessary (call, arg);
929 }
930 }
931 else if (gimple_assign_single_p (stmt))
932 {
933 tree rhs;
934 /* If this is a load mark things necessary. */
935 rhs = gimple_assign_rhs1 (stmt);
936 if (TREE_CODE (rhs) != SSA_NAME
937 && !is_gimple_min_invariant (rhs)
938 && TREE_CODE (rhs) != CONSTRUCTOR)
939 {
940 if (!ref_may_be_aliased (rhs))
941 mark_aliased_reaching_defs_necessary (stmt, rhs);
942 else
943 mark_all_reaching_defs_necessary (stmt);
944 }
945 }
946 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
947 {
948 tree rhs = gimple_return_retval (return_stmt);
949 /* A return statement may perform a load. */
950 if (rhs
951 && TREE_CODE (rhs) != SSA_NAME
952 && !is_gimple_min_invariant (rhs)
953 && TREE_CODE (rhs) != CONSTRUCTOR)
954 {
955 if (!ref_may_be_aliased (rhs))
956 mark_aliased_reaching_defs_necessary (stmt, rhs);
957 else
958 mark_all_reaching_defs_necessary (stmt);
959 }
960 }
961 else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
962 {
963 unsigned i;
964 mark_all_reaching_defs_necessary (stmt);
965 /* Inputs may perform loads. */
966 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
967 {
968 tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
969 if (TREE_CODE (op) != SSA_NAME
970 && !is_gimple_min_invariant (op)
971 && TREE_CODE (op) != CONSTRUCTOR
972 && !ref_may_be_aliased (op))
973 mark_aliased_reaching_defs_necessary (stmt, op);
974 }
975 }
976 else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
977 {
978 /* The beginning of a transaction is a memory barrier. */
979 /* ??? If we were really cool, we'd only be a barrier
980 for the memories touched within the transaction. */
981 mark_all_reaching_defs_necessary (stmt);
982 }
983 else
984 gcc_unreachable ();
985
986 /* If we over-used our alias oracle budget drop to simple
987 mode. The cost metric allows quadratic behavior
988 (number of uses times number of may-defs queries) up to
989 a constant maximal number of queries and after that falls back to
990 super-linear complexity. */
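/* Worked example for the thresholds below (illustrative numbers only):
   after 200 walks visiting 100 may-defs each, total_chain is 20000,
   which exceeds 128 * 128 == 16384, 32 * longest_chain == 3200 and
   nr_walks * 32 == 6400, so we drop to chain_ovfl mode for the
   remaining statements.  */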
991 if (/* Constant but quadratic for small functions. */
992 total_chain > 128 * 128
993 /* Linear in the number of may-defs. */
994 && total_chain > 32 * longest_chain
995 /* Linear in the number of uses. */
996 && total_chain > nr_walks * 32)
997 {
998 chain_ovfl = true;
999 if (visited)
1000 bitmap_clear (visited);
1001 }
1002 }
1003 }
1004 }
1005
1006 /* Remove dead PHI nodes from block BB. */
1007
1008 static bool
1009 remove_dead_phis (basic_block bb)
1010 {
1011 bool something_changed = false;
1012 gphi *phi;
1013 gphi_iterator gsi;
1014
1015 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
1016 {
1017 stats.total_phis++;
1018 phi = gsi.phi ();
1019
1020 /* We do not track necessity of virtual PHI nodes. Instead do
1021 very simple dead PHI removal here. */
1022 if (virtual_operand_p (gimple_phi_result (phi)))
1023 {
1024 /* Virtual PHI nodes with one or identical arguments
1025 can be removed. */
1026 if (degenerate_phi_p (phi))
1027 {
1028 tree vdef = gimple_phi_result (phi);
1029 tree vuse = gimple_phi_arg_def (phi, 0);
1030
1031 use_operand_p use_p;
1032 imm_use_iterator iter;
1033 gimple *use_stmt;
1034 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1035 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1036 SET_USE (use_p, vuse);
1037 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
1038 && TREE_CODE (vuse) == SSA_NAME)
1039 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
1040 }
1041 else
1042 gimple_set_plf (phi, STMT_NECESSARY, true);
1043 }
1044
1045 if (!gimple_plf (phi, STMT_NECESSARY))
1046 {
1047 something_changed = true;
1048 if (dump_file && (dump_flags & TDF_DETAILS))
1049 {
1050 fprintf (dump_file, "Deleting : ");
1051 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
1052 fprintf (dump_file, "\n");
1053 }
1054
1055 remove_phi_node (&gsi, true);
1056 stats.removed_phis++;
1057 continue;
1058 }
1059
1060 gsi_next (&gsi);
1061 }
1062 return something_changed;
1063 }
1064
1065
1066 /* Remove dead statement pointed to by iterator I. Receives the basic block BB
1067 containing I so that we don't have to look it up. */
1068
1069 static void
1070 remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
1071 vec<edge> &to_remove_edges)
1072 {
1073 gimple *stmt = gsi_stmt (*i);
1074
1075 if (dump_file && (dump_flags & TDF_DETAILS))
1076 {
1077 fprintf (dump_file, "Deleting : ");
1078 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1079 fprintf (dump_file, "\n");
1080 }
1081
1082 stats.removed++;
1083
1084 /* If we have determined that a conditional branch statement contributes
1085 nothing to the program, then we not only remove it, but we need to update
1086 the CFG. We can choose any of the edges out of BB as long as we are sure
1087 not to close infinite loops. This is done by always choosing the edge
1088 closer to exit in inverted_post_order_compute order. */
1089 if (is_ctrl_stmt (stmt))
1090 {
1091 edge_iterator ei;
1092 edge e = NULL, e2;
1093
1094 /* See if there is only one non-abnormal edge. */
1095 if (single_succ_p (bb))
1096 e = single_succ_edge (bb);
1097 /* Otherwise choose one that is closer to a bb with a live statement in it.
1098 To be able to choose one, we compute the inverted post order starting
1099 from all BBs with live statements. */
1100 if (!e)
1101 {
1102 if (!bb_postorder)
1103 {
1104 auto_vec<int, 20> postorder;
1105 inverted_post_order_compute (&postorder,
1106 &bb_contains_live_stmts);
1107 bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
1108 for (unsigned int i = 0; i < postorder.length (); ++i)
1109 bb_postorder[postorder[i]] = i;
1110 }
1111 FOR_EACH_EDGE (e2, ei, bb->succs)
1112 if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
1113 || bb_postorder [e->dest->index]
1114 < bb_postorder [e2->dest->index])
1115 e = e2;
1116 }
1117 gcc_assert (e);
1118 e->probability = profile_probability::always ();
1119
1120 /* The edge is no longer associated with a conditional, so it does
1121 not have TRUE/FALSE flags.
1122 We are also safe to drop EH/ABNORMAL flags and turn them into
1123 normal control flow, because we know that all the destinations (including
1124 those odd edges) are equivalent for program execution. */
1125 e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);
1126
1127 /* The lone outgoing edge from BB will be a fallthru edge. */
1128 e->flags |= EDGE_FALLTHRU;
1129
1130 /* Remove the remaining outgoing edges. */
1131 FOR_EACH_EDGE (e2, ei, bb->succs)
1132 if (e != e2)
1133 {
1134 /* If we made a BB unconditionally exit a loop or removed
1135 an entry into an irreducible region, then this transform
1136 alters the set of BBs in the loop. Schedule a fixup. */
1137 if (loop_exit_edge_p (bb->loop_father, e)
1138 || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
1139 loops_state_set (LOOPS_NEED_FIXUP);
1140 to_remove_edges.safe_push (e2);
1141 }
1142 }
1143
1144 /* If this is a store into a variable that is being optimized away,
1145 add a debug bind stmt if possible. */
1146 if (MAY_HAVE_DEBUG_BIND_STMTS
1147 && gimple_assign_single_p (stmt)
1148 && is_gimple_val (gimple_assign_rhs1 (stmt)))
1149 {
1150 tree lhs = gimple_assign_lhs (stmt);
1151 if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
1152 && !DECL_IGNORED_P (lhs)
1153 && is_gimple_reg_type (TREE_TYPE (lhs))
1154 && !is_global_var (lhs)
1155 && !DECL_HAS_VALUE_EXPR_P (lhs))
1156 {
1157 tree rhs = gimple_assign_rhs1 (stmt);
1158 gdebug *note
1159 = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
1160 gsi_insert_after (i, note, GSI_SAME_STMT);
1161 }
1162 }
1163
1164 unlink_stmt_vdef (stmt);
1165 gsi_remove (i, true);
1166 release_defs (stmt);
1167 }
1168
1169 /* Helper for maybe_optimize_arith_overflow. Find in *TP whether there are
1170 any uses of DATA (an SSA_NAME) other than through REALPART_EXPR. */
1171
1172 static tree
1173 find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
1174 {
1175 if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
1176 *walk_subtrees = 0;
1177 if (*tp == (tree) data)
1178 return *tp;
1179 return NULL_TREE;
1180 }
1181
1182 /* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
1183 but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
1184 into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
1185 uses. */
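/* For instance (illustrative GIMPLE, not from a testcase):

     _1 = ADD_OVERFLOW (a_2, b_3);
     x_4 = REALPART_EXPR <_1>;

   with IMAGPART_EXPR <_1> never read is rewritten so that the sum is
   computed as a plain unsigned PLUS_EXPR and re-wrapped in a
   COMPLEX_EXPR with a zero overflow part, keeping x_4 valid.  */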
1186
1187 static void
1188 maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
1189 enum tree_code subcode)
1190 {
1191 gimple *stmt = gsi_stmt (*gsi);
1192 tree lhs = gimple_call_lhs (stmt);
1193
1194 if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
1195 return;
1196
1197 imm_use_iterator imm_iter;
1198 use_operand_p use_p;
1199 bool has_debug_uses = false;
1200 bool has_realpart_uses = false;
1201 bool has_other_uses = false;
1202 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
1203 {
1204 gimple *use_stmt = USE_STMT (use_p);
1205 if (is_gimple_debug (use_stmt))
1206 has_debug_uses = true;
1207 else if (is_gimple_assign (use_stmt)
1208 && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
1209 && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
1210 has_realpart_uses = true;
1211 else
1212 {
1213 has_other_uses = true;
1214 break;
1215 }
1216 }
1217
1218 if (!has_realpart_uses || has_other_uses)
1219 return;
1220
1221 tree arg0 = gimple_call_arg (stmt, 0);
1222 tree arg1 = gimple_call_arg (stmt, 1);
1223 location_t loc = gimple_location (stmt);
1224 tree type = TREE_TYPE (TREE_TYPE (lhs));
1225 tree utype = type;
1226 if (!TYPE_UNSIGNED (type))
1227 utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
1228 tree result = fold_build2_loc (loc, subcode, utype,
1229 fold_convert_loc (loc, utype, arg0),
1230 fold_convert_loc (loc, utype, arg1));
1231 result = fold_convert_loc (loc, type, result);
1232
1233 if (has_debug_uses)
1234 {
1235 gimple *use_stmt;
1236 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
1237 {
1238 if (!gimple_debug_bind_p (use_stmt))
1239 continue;
1240 tree v = gimple_debug_bind_get_value (use_stmt);
1241 if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
1242 {
1243 gimple_debug_bind_reset_value (use_stmt);
1244 update_stmt (use_stmt);
1245 }
1246 }
1247 }
1248
1249 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
1250 result = drop_tree_overflow (result);
1251 tree overflow = build_zero_cst (type);
1252 tree ctype = build_complex_type (type);
1253 if (TREE_CODE (result) == INTEGER_CST)
1254 result = build_complex (ctype, result, overflow);
1255 else
1256 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
1257 ctype, result, overflow);
1258
1259 if (dump_file && (dump_flags & TDF_DETAILS))
1260 {
1261 fprintf (dump_file, "Transforming call: ");
1262 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1263 fprintf (dump_file, "because the overflow result is never used into: ");
1264 print_generic_stmt (dump_file, result, TDF_SLIM);
1265 fprintf (dump_file, "\n");
1266 }
1267
1268 gimplify_and_update_call_from_tree (gsi, result);
1269 }
1270
1271 /* Eliminate unnecessary statements. Any instruction not marked as necessary
1272 contributes nothing to the program, and can be deleted. */
1273
1274 static bool
1275 eliminate_unnecessary_stmts (void)
1276 {
1277 bool something_changed = false;
1278 basic_block bb;
1279 gimple_stmt_iterator gsi, psi;
1280 gimple *stmt;
1281 tree call;
1282 auto_vec<edge> to_remove_edges;
1283
1284 if (dump_file && (dump_flags & TDF_DETAILS))
1285 fprintf (dump_file, "\nEliminating unnecessary statements:\n");
1286
1287 clear_special_calls ();
1288
1289 /* Walking basic blocks and statements in reverse order avoids
1290 releasing SSA names before any other DEFs that refer to them are
1291 released. This helps avoid loss of debug information, as we get
1292 a chance to propagate all RHSs of removed SSAs into debug uses,
1293 rather than only the latest ones. E.g., consider:
1294
1295 x_3 = y_1 + z_2;
1296 a_5 = x_3 - b_4;
1297 # DEBUG a => a_5
1298
1299 If we were to release x_3 before a_5, when we reached a_5 and
1300 tried to substitute it into the debug stmt, we'd see x_3 there,
1301 but x_3's DEF, type, etc would have already been disconnected.
1302 By going backwards, the debug stmt first changes to:
1303
1304 # DEBUG a => x_3 - b_4
1305
1306 and then to:
1307
1308 # DEBUG a => y_1 + z_2 - b_4
1309
1310 as desired. */
1311 gcc_assert (dom_info_available_p (CDI_DOMINATORS));
1312 auto_vec<basic_block> h;
1313 h = get_all_dominated_blocks (CDI_DOMINATORS,
1314 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1315
1316 while (h.length ())
1317 {
1318 bb = h.pop ();
1319
1320 /* Remove dead statements. */
1321 auto_bitmap debug_seen;
1322 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
1323 {
1324 stmt = gsi_stmt (gsi);
1325
1326 psi = gsi;
1327 gsi_prev (&psi);
1328
1329 stats.total++;
1330
1331 /* We can mark a call to free as not necessary if the
1332 defining statement of its argument is not necessary
1333 (and thus is getting removed). */
1334 if (gimple_plf (stmt, STMT_NECESSARY)
1335 && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
1336 || (is_gimple_call (stmt)
1337 && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
1338 && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
1339 {
1340 tree ptr = gimple_call_arg (stmt, 0);
1341 if (TREE_CODE (ptr) == SSA_NAME)
1342 {
1343 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
1344 if (!gimple_nop_p (def_stmt)
1345 && !gimple_plf (def_stmt, STMT_NECESSARY))
1346 gimple_set_plf (stmt, STMT_NECESSARY, false);
1347 }
1348 }
1349
1350 /* If GSI is not necessary then remove it. */
1351 if (!gimple_plf (stmt, STMT_NECESSARY))
1352 {
1353 /* Keep clobbers that we are able to keep live. */
1354 if (gimple_clobber_p (stmt))
1355 {
1356 ssa_op_iter iter;
1357 use_operand_p use_p;
1358 bool dead = false;
1359 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1360 {
1361 tree name = USE_FROM_PTR (use_p);
1362 if (!SSA_NAME_IS_DEFAULT_DEF (name)
1363 && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
1364 {
1365 dead = true;
1366 break;
1367 }
1368 }
1369 if (!dead)
1370 {
1371 bitmap_clear (debug_seen);
1372 continue;
1373 }
1374 }
1375 if (!is_gimple_debug (stmt))
1376 something_changed = true;
1377 remove_dead_stmt (&gsi, bb, to_remove_edges);
1378 continue;
1379 }
1380 else if (is_gimple_call (stmt))
1381 {
1382 tree name = gimple_call_lhs (stmt);
1383
1384 notice_special_calls (as_a <gcall *> (stmt));
1385
1386 /* When LHS of var = call (); is dead, simplify it into
1387 call (); saving one operand. */
1388 if (name
1389 && TREE_CODE (name) == SSA_NAME
1390 && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
1391 /* Avoid doing so for allocation calls which we
1392 did not mark as necessary, it will confuse the
1393 special logic we apply to malloc/free pair removal. */
1394 && (!(call = gimple_call_fndecl (stmt))
1395 || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
1396 || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
1397 && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
1398 && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
1399 && !ALLOCA_FUNCTION_CODE_P
1400 (DECL_FUNCTION_CODE (call))))
1401 && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
1402 {
1403 something_changed = true;
1404 if (dump_file && (dump_flags & TDF_DETAILS))
1405 {
1406 fprintf (dump_file, "Deleting LHS of call: ");
1407 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1408 fprintf (dump_file, "\n");
1409 }
1410
1411 gimple_call_set_lhs (stmt, NULL_TREE);
1412 maybe_clean_or_replace_eh_stmt (stmt, stmt);
1413 update_stmt (stmt);
1414 release_ssa_name (name);
1415
1416 /* GOMP_SIMD_LANE (unless it is the three-argument form) or ASAN_POISON
1417 without a lhs is not needed. */
1418 if (gimple_call_internal_p (stmt))
1419 switch (gimple_call_internal_fn (stmt))
1420 {
1421 case IFN_GOMP_SIMD_LANE:
1422 if (gimple_call_num_args (stmt) >= 3
1423 && !integer_nonzerop (gimple_call_arg (stmt, 2)))
1424 break;
1425 /* FALLTHRU */
1426 case IFN_ASAN_POISON:
1427 remove_dead_stmt (&gsi, bb, to_remove_edges);
1428 break;
1429 default:
1430 break;
1431 }
1432 }
1433 else if (gimple_call_internal_p (stmt))
1434 switch (gimple_call_internal_fn (stmt))
1435 {
1436 case IFN_ADD_OVERFLOW:
1437 maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
1438 break;
1439 case IFN_SUB_OVERFLOW:
1440 maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
1441 break;
1442 case IFN_MUL_OVERFLOW:
1443 maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
1444 break;
1445 default:
1446 break;
1447 }
1448 }
1449 else if (gimple_debug_bind_p (stmt))
1450 {
1451 /* We are only keeping the last debug-bind of a
1452 non-DEBUG_EXPR_DECL variable in a series of
1453 debug-bind stmts. */
1454 tree var = gimple_debug_bind_get_var (stmt);
1455 if (TREE_CODE (var) != DEBUG_EXPR_DECL
1456 && !bitmap_set_bit (debug_seen, DECL_UID (var)))
1457 remove_dead_stmt (&gsi, bb, to_remove_edges);
1458 continue;
1459 }
1460 bitmap_clear (debug_seen);
1461 }
1462
1463 /* Remove dead PHI nodes. */
1464 something_changed |= remove_dead_phis (bb);
1465 }
1466
1467
1468 /* Since we don't track liveness of virtual PHI nodes, it is possible that we
1469 rendered some PHI nodes unreachable while they are still in use.
1470 Mark them for renaming. */
1471 if (!to_remove_edges.is_empty ())
1472 {
1473 basic_block prev_bb;
1474
1475 /* Remove edges. We've delayed this to not get bogus debug stmts
1476 during PHI node removal. */
1477 for (unsigned i = 0; i < to_remove_edges.length (); ++i)
1478 remove_edge (to_remove_edges[i]);
1479 cfg_altered = true;
1480
1481 find_unreachable_blocks ();
1482
1483 /* Delete all unreachable basic blocks in reverse dominator order. */
1484 for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
1485 bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
1486 {
1487 prev_bb = bb->prev_bb;
1488
1489 if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
1490 || !(bb->flags & BB_REACHABLE))
1491 {
1492 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1493 gsi_next (&gsi))
1494 if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
1495 {
1496 bool found = false;
1497 imm_use_iterator iter;
1498
1499 FOR_EACH_IMM_USE_STMT (stmt, iter,
1500 gimple_phi_result (gsi.phi ()))
1501 {
1502 if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
1503 continue;
1504 if (gimple_code (stmt) == GIMPLE_PHI
1505 || gimple_plf (stmt, STMT_NECESSARY))
1506 {
1507 found = true;
1508 break;
1509 }
1510 }
1511 if (found)
1512 mark_virtual_phi_result_for_renaming (gsi.phi ());
1513 }
1514
1515 if (!(bb->flags & BB_REACHABLE))
1516 {
1517 /* Speed up the removal of blocks that don't
1518 dominate others. Walking backwards, this should
1519 be the common case. ??? Do we need to recompute
1520 dominators because of cfg_altered? */
1521 if (!first_dom_son (CDI_DOMINATORS, bb))
1522 delete_basic_block (bb);
1523 else
1524 {
1525 h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
1526
1527 while (h.length ())
1528 {
1529 bb = h.pop ();
1530 prev_bb = bb->prev_bb;
1531 /* Rearrangements to the CFG may have failed
1532 to update the dominators tree, so that
1533 formerly-dominated blocks are now
1534 otherwise reachable. */
1535 if (!!(bb->flags & BB_REACHABLE))
1536 continue;
1537 delete_basic_block (bb);
1538 }
1539
1540 h.release ();
1541 }
1542 }
1543 }
1544 }
1545 }
1546
1547 if (bb_postorder)
1548 free (bb_postorder);
1549 bb_postorder = NULL;
1550
1551 return something_changed;
1552 }
1553
1554
1555 /* Print out removed statement statistics. */
1556
1557 static void
1558 print_stats (void)
1559 {
1560 float percg;
1561
1562 percg = ((float) stats.removed / (float) stats.total) * 100;
1563 fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
1564 stats.removed, stats.total, (int) percg);
1565
1566 if (stats.total_phis == 0)
1567 percg = 0;
1568 else
1569 percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
1570
1571 fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
1572 stats.removed_phis, stats.total_phis, (int) percg);
1573 }
1574
1575 /* Initialization for this pass. Set up the used data structures. */
1576
1577 static void
1578 tree_dce_init (bool aggressive)
1579 {
1580 memset ((void *) &stats, 0, sizeof (stats));
1581
1582 if (aggressive)
1583 {
1584 last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
1585 bitmap_clear (last_stmt_necessary);
1586 bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
1587 bitmap_clear (bb_contains_live_stmts);
1588 }
1589
1590 processed = sbitmap_alloc (num_ssa_names + 1);
1591 bitmap_clear (processed);
1592
1593 worklist.create (64);
1594 cfg_altered = false;
1595 }
1596
1597 /* Cleanup after this pass. */
1598
1599 static void
1600 tree_dce_done (bool aggressive)
1601 {
1602 if (aggressive)
1603 {
1604 delete cd;
1605 sbitmap_free (visited_control_parents);
1606 sbitmap_free (last_stmt_necessary);
1607 sbitmap_free (bb_contains_live_stmts);
1608 bb_contains_live_stmts = NULL;
1609 }
1610
1611 sbitmap_free (processed);
1612
1613 worklist.release ();
1614 }
1615
1616 /* Sort PHI argument values for make_forwarders_with_degenerate_phis. */
1617
1618 static int
1619 sort_phi_args (const void *a_, const void *b_)
1620 {
1621 auto *a = (const std::pair<edge, hashval_t> *) a_;
1622 auto *b = (const std::pair<edge, hashval_t> *) b_;
1623 hashval_t ha = a->second;
1624 hashval_t hb = b->second;
1625 if (ha < hb)
1626 return -1;
1627 else if (ha > hb)
1628 return 1;
1629 else
1630 return 0;
1631 }
1632
1633 /* Look for non-virtual PHIs and make a forwarder block when all PHIs
1634 have the same argument on a set of edges. This is to avoid considering
1635 control dependences of the individual edges for the same values and
1636 instead consider only those of the common set. */
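/* E.g. (hypothetical): for x_1 = PHI <a_2(E1), a_2(E2), b_3(E3)>, where
   every other PHI in the block also has equal arguments on E1 and E2,
   a forwarder block is created and E1 and E2 are redirected to it, so
   the value a_2 arrives over a single incoming edge.  */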
1637
1638 static unsigned
1639 make_forwarders_with_degenerate_phis (function *fn)
1640 {
1641 unsigned todo = 0;
1642
1643 basic_block bb;
1644 FOR_EACH_BB_FN (bb, fn)
1645 {
1646 /* Only PHIs with three or more arguments have opportunities. */
1647 if (EDGE_COUNT (bb->preds) < 3)
1648 continue;
1649 /* Do not touch loop headers. */
1650 if (bb->loop_father->header == bb)
1651 continue;
1652
1653 /* Take one PHI node as template to look for identical
1654 arguments. Build a vector of candidates forming sets
1655 of argument edges with equal values. Note optimality
1656 depends on the particular choice of the template PHI
1657 since equal arguments are unordered leaving other PHIs
1658 with more than one set of equal arguments within this
1659 argument range unsorted. We'd have to break ties by
1660 looking at other PHI nodes. */
1661 gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
1662 if (gsi_end_p (gsi))
1663 continue;
1664 gphi *phi = gsi.phi ();
1665 auto_vec<std::pair<edge, hashval_t>, 8> args;
1666 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
1667 {
1668 edge e = gimple_phi_arg_edge (phi, i);
1669 /* Skip abnormal edges since we cannot redirect them. */
1670 if (e->flags & EDGE_ABNORMAL)
1671 continue;
1672 /* Skip loop exit edges when we are in loop-closed SSA form
1673 since the forwarder we'd create does not have a PHI node. */
1674 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
1675 && loop_exit_edge_p (e->src->loop_father, e))
1676 continue;
1677 args.safe_push (std::make_pair (e, iterative_hash_expr
1678 (gimple_phi_arg_def (phi, i), 0)));
1679 }
1680 if (args.length () < 2)
1681 continue;
1682 args.qsort (sort_phi_args);
1683
1684 /* From the candidates vector now verify true candidates for
1685 forwarders and create them. */
1686 gphi *vphi = get_virtual_phi (bb);
1687 unsigned start = 0;
1688 while (start < args.length () - 1)
1689 {
1690 unsigned i;
1691 for (i = start + 1; i < args.length (); ++i)
1692 if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, args[start].first),
1693 PHI_ARG_DEF_FROM_EDGE (phi, args[i].first)))
1694 break;
1695 /* args[start]..args[i-1] are equal. */
1696 if (start != i - 1)
1697 {
1698 /* Check all PHI nodes for argument equality. */
1699 bool equal = true;
1700 gphi_iterator gsi2 = gsi;
1701 gsi_next (&gsi2);
1702 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
1703 {
1704 gphi *phi2 = gsi2.phi ();
1705 if (virtual_operand_p (gimple_phi_result (phi2)))
1706 continue;
1707 tree start_arg
1708 = PHI_ARG_DEF_FROM_EDGE (phi2, args[start].first);
1709 for (unsigned j = start + 1; j < i; ++j)
1710 {
1711 if (!operand_equal_p (start_arg,
1712 PHI_ARG_DEF_FROM_EDGE
1713 (phi2, args[j].first)))
1714 {
1715 /* Another PHI might have a shorter set of
1716 equivalent args. Go for that. */
1717 i = j;
1718 if (j == start + 1)
1719 equal = false;
1720 break;
1721 }
1722 }
1723 if (!equal)
1724 break;
1725 }
1726 if (equal)
1727 {
1728 /* If we are asked to forward all edges the block
1729 has all degenerate PHIs. Do nothing in that case. */
1730 if (start == 0
1731 && i == args.length ()
1732 && args.length () == gimple_phi_num_args (phi))
1733 break;
1734 /* Instead of using make_forwarder_block we are
1735 rolling our own variant knowing that the forwarder
1736 does not need PHI nodes apart from possibly
1737 a virtual one. */
1738 auto_vec<tree, 8> vphi_args;
1739 if (vphi)
1740 {
1741 vphi_args.reserve_exact (i - start);
1742 for (unsigned j = start; j < i; ++j)
1743 vphi_args.quick_push
1744 (PHI_ARG_DEF_FROM_EDGE (vphi, args[j].first));
1745 }
1746 free_dominance_info (fn, CDI_DOMINATORS);
1747 basic_block forwarder = split_edge (args[start].first);
1748 for (unsigned j = start + 1; j < i; ++j)
1749 {
1750 edge e = args[j].first;
1751 redirect_edge_and_branch_force (e, forwarder);
1752 redirect_edge_var_map_clear (e);
1753 }
1754 if (vphi)
1755 {
1756 tree def = copy_ssa_name (vphi_args[0]);
1757 gphi *vphi_copy = create_phi_node (def, forwarder);
1758 for (unsigned j = start; j < i; ++j)
1759 add_phi_arg (vphi_copy, vphi_args[j - start],
1760 args[j].first, UNKNOWN_LOCATION);
1761 SET_PHI_ARG_DEF
1762 (vphi, single_succ_edge (forwarder)->dest_idx, def);
1763 }
1764 todo |= TODO_cleanup_cfg;
1765 }
1766 }
1767 /* Continue searching for more opportunities. */
1768 start = i;
1769 }
1770 }
1771 return todo;
1772 }
1773
1774 /* Main routine to eliminate dead code.
1775
1776 AGGRESSIVE controls the aggressiveness of the algorithm.
1777 In conservative mode, we ignore control dependence and simply declare
1778 all but the most trivially dead branches necessary. This mode is fast.
1779 In aggressive mode, control dependences are taken into account, which
1780 results in more dead code elimination, but at the cost of some time.
1781
1782 FIXME: Aggressive mode before PRE doesn't work currently because
1783 the dominance info is not invalidated after DCE1. This is
1784 not an issue right now because we only run aggressive DCE
1785 as the last tree SSA pass, but keep this in mind when you
1786 start experimenting with pass ordering. */
1787
1788 static unsigned int
1789 perform_tree_ssa_dce (bool aggressive)
1790 {
1791 bool something_changed = 0;
1792 unsigned todo = 0;
1793
1794 /* Preheaders are needed for SCEV to work.
1795 Simple latches and recorded exits improve the chances that loops will
1796 be proved finite in testcases such as loop-15.c and loop-24.c. */
1797 bool in_loop_pipeline = scev_initialized_p ();
1798 if (aggressive && ! in_loop_pipeline)
1799 {
1800 scev_initialize ();
1801 loop_optimizer_init (LOOPS_NORMAL
1802 | LOOPS_HAVE_RECORDED_EXITS);
1803 }
1804
1805 if (aggressive)
1806 todo |= make_forwarders_with_degenerate_phis (cfun);
1807
1808 calculate_dominance_info (CDI_DOMINATORS);
1809
1810 tree_dce_init (aggressive);
1811
1812 if (aggressive)
1813 {
1814 /* Compute control dependence. */
1815 calculate_dominance_info (CDI_POST_DOMINATORS);
1816 cd = new control_dependences ();
1817
1818 visited_control_parents =
1819 sbitmap_alloc (last_basic_block_for_fn (cfun));
1820 bitmap_clear (visited_control_parents);
1821
1822 mark_dfs_back_edges ();
1823 }
1824
1825 find_obviously_necessary_stmts (aggressive);
1826
1827 if (aggressive && ! in_loop_pipeline)
1828 {
1829 loop_optimizer_finalize ();
1830 scev_finalize ();
1831 }
1832
1833 longest_chain = 0;
1834 total_chain = 0;
1835 nr_walks = 0;
1836 chain_ovfl = false;
1837 visited = BITMAP_ALLOC (NULL);
1838 propagate_necessity (aggressive);
1839 BITMAP_FREE (visited);
1840
1841 something_changed |= eliminate_unnecessary_stmts ();
1842 something_changed |= cfg_altered;
1843
1844 /* We do not update postdominators, so free them unconditionally. */
1845 free_dominance_info (CDI_POST_DOMINATORS);
1846
1847 /* If we removed paths in the CFG, then we need to update
1848 dominators as well. I haven't investigated the possibility
1849 of incrementally updating dominators. */
1850 if (cfg_altered)
1851 free_dominance_info (CDI_DOMINATORS);
1852
1853 statistics_counter_event (cfun, "Statements deleted", stats.removed);
1854 statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);
1855
1856 /* Debugging dumps. */
1857 if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
1858 print_stats ();
1859
1860 tree_dce_done (aggressive);
1861
1862 if (something_changed)
1863 {
1864 free_numbers_of_iterations_estimates (cfun);
1865 if (in_loop_pipeline)
1866 scev_reset ();
1867 todo |= TODO_update_ssa | TODO_cleanup_cfg;
1868 }
1869 return todo;
1870 }
1871
1872 /* Pass entry points. */
1873 static unsigned int
1874 tree_ssa_dce (void)
1875 {
1876 return perform_tree_ssa_dce (/*aggressive=*/false);
1877 }
1878
1879 static unsigned int
1880 tree_ssa_cd_dce (void)
1881 {
1882 return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
1883 }
1884
1885 namespace {
1886
1887 const pass_data pass_data_dce =
1888 {
1889 GIMPLE_PASS, /* type */
1890 "dce", /* name */
1891 OPTGROUP_NONE, /* optinfo_flags */
1892 TV_TREE_DCE, /* tv_id */
1893 ( PROP_cfg | PROP_ssa ), /* properties_required */
1894 0, /* properties_provided */
1895 0, /* properties_destroyed */
1896 0, /* todo_flags_start */
1897 0, /* todo_flags_finish */
1898 };
1899
1900 class pass_dce : public gimple_opt_pass
1901 {
1902 public:
1903 pass_dce (gcc::context *ctxt)
1904 : gimple_opt_pass (pass_data_dce, ctxt)
1905 {}
1906
1907 /* opt_pass methods: */
1908 opt_pass * clone () { return new pass_dce (m_ctxt); }
1909 virtual bool gate (function *) { return flag_tree_dce != 0; }
1910 virtual unsigned int execute (function *) { return tree_ssa_dce (); }
1911
1912 }; // class pass_dce
1913
1914 } // anon namespace
1915
1916 gimple_opt_pass *
1917 make_pass_dce (gcc::context *ctxt)
1918 {
1919 return new pass_dce (ctxt);
1920 }
1921
1922 namespace {
1923
1924 const pass_data pass_data_cd_dce =
1925 {
1926 GIMPLE_PASS, /* type */
1927 "cddce", /* name */
1928 OPTGROUP_NONE, /* optinfo_flags */
1929 TV_TREE_CD_DCE, /* tv_id */
1930 ( PROP_cfg | PROP_ssa ), /* properties_required */
1931 0, /* properties_provided */
1932 0, /* properties_destroyed */
1933 0, /* todo_flags_start */
1934 0, /* todo_flags_finish */
1935 };
1936
1937 class pass_cd_dce : public gimple_opt_pass
1938 {
1939 public:
1940 pass_cd_dce (gcc::context *ctxt)
1941 : gimple_opt_pass (pass_data_cd_dce, ctxt), update_address_taken_p (false)
1942 {}
1943
1944 /* opt_pass methods: */
1945 opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
1946 void set_pass_param (unsigned n, bool param)
1947 {
1948 gcc_assert (n == 0);
1949 update_address_taken_p = param;
1950 }
1951 virtual bool gate (function *) { return flag_tree_dce != 0; }
1952 virtual unsigned int execute (function *)
1953 {
1954 return (tree_ssa_cd_dce ()
1955 | (update_address_taken_p ? TODO_update_address_taken : 0));
1956 }
1957
1958 private:
1959 bool update_address_taken_p;
1960 }; // class pass_cd_dce
1961
1962 } // anon namespace
1963
1964 gimple_opt_pass *
1965 make_pass_cd_dce (gcc::context *ctxt)
1966 {
1967 return new pass_cd_dce (ctxt);
1968 }
1969
1970
1971 /* A cheap DCE interface. WORKLIST is a list of possibly dead stmts and
1972 is consumed by this function. The function has linear complexity in
1973 the number of dead stmts, with a constant factor proportional to the
1974 average number of SSA use operands per statement. */
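/* A minimal usage sketch (an assumed caller, not part of this file):

     bitmap maybe_dead = BITMAP_ALLOC (NULL);
     bitmap_set_bit (maybe_dead, SSA_NAME_VERSION (old_lhs));
     simple_dce_from_worklist (maybe_dead);
     BITMAP_FREE (maybe_dead);

   where old_lhs is an SSA name whose remaining uses the caller has just
   replaced; the worklist is consumed, the caller only frees the bitmap.  */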
1975
1976 void
1977 simple_dce_from_worklist (bitmap worklist)
1978 {
1979 while (! bitmap_empty_p (worklist))
1980 {
1981 /* Pop item. */
1982 unsigned i = bitmap_first_set_bit (worklist);
1983 bitmap_clear_bit (worklist, i);
1984
1985 tree def = ssa_name (i);
1986 /* Removed by somebody else or still in use. */
1987 if (! def || ! has_zero_uses (def))
1988 continue;
1989
1990 gimple *t = SSA_NAME_DEF_STMT (def);
1991 if (gimple_has_side_effects (t))
1992 continue;
1993
1994 /* Don't remove statements that are needed for non-call
1995 eh to work. */
1996 if (stmt_unremovable_because_of_non_call_eh_p (cfun, t))
1997 continue;
1998
1999 /* Add uses to the worklist. */
2000 ssa_op_iter iter;
2001 use_operand_p use_p;
2002 FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
2003 {
2004 tree use = USE_FROM_PTR (use_p);
2005 if (TREE_CODE (use) == SSA_NAME
2006 && ! SSA_NAME_IS_DEFAULT_DEF (use))
2007 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
2008 }
2009
2010 /* Remove stmt. */
2011 if (dump_file && (dump_flags & TDF_DETAILS))
2012 {
2013 fprintf (dump_file, "Removing dead stmt:");
2014 print_gimple_stmt (dump_file, t, 0);
2015 }
2016 gimple_stmt_iterator gsi = gsi_for_stmt (t);
2017 if (gimple_code (t) == GIMPLE_PHI)
2018 remove_phi_node (&gsi, true);
2019 else
2020 {
2021 gsi_remove (&gsi, true);
2022 release_defs (t);
2023 }
2024 }
2025 }